merge with wrong head
authorSverre Rabbelier <sverre@rabbelier.nl>
Mon, 07 Sep 2009 20:27:37 +0200
changeset 2878 cf4b80992451
parent 2871 e440e94a874b (current diff)
parent 2877 8bbdc95f87f8 (diff)
child 2879 cb0f9b4646aa
merge with wrong head
thirdparty/google_appengine/lib/django/MANIFEST.in
thirdparty/google_appengine/lib/django/django/contrib/flatpages/README.TXT
thirdparty/google_appengine/lib/django/django/contrib/formtools/templates/formtools/form.html
thirdparty/google_appengine/lib/django/django/contrib/formtools/templates/formtools/preview.html
thirdparty/google_appengine/lib/django/django/contrib/redirects/README.TXT
thirdparty/google_appengine/lib/django/django/dispatch/license.txt
thirdparty/google_appengine/lib/django/django/utils/simplejson/LICENSE.txt
thirdparty/google_appengine/lib/django/examples/__init__.py
thirdparty/google_appengine/lib/django/examples/hello/__init__.py
thirdparty/google_appengine/lib/django/examples/hello/urls.py
thirdparty/google_appengine/lib/django/examples/hello/views.py
thirdparty/google_appengine/lib/django/examples/manage.py
thirdparty/google_appengine/lib/django/examples/settings.py
thirdparty/google_appengine/lib/django/examples/urls.py
thirdparty/google_appengine/lib/django/examples/views.py
thirdparty/google_appengine/lib/django/extras/README.TXT
thirdparty/google_appengine/lib/django/extras/django_bash_completion
thirdparty/google_appengine/lib/django/tests/modeltests/__init__.py
thirdparty/google_appengine/lib/django/tests/modeltests/basic/__init__.py
thirdparty/google_appengine/lib/django/tests/modeltests/basic/models.py
thirdparty/google_appengine/lib/django/tests/modeltests/choices/__init__.py
thirdparty/google_appengine/lib/django/tests/modeltests/choices/models.py
thirdparty/google_appengine/lib/django/tests/modeltests/custom_columns/__init__.py
thirdparty/google_appengine/lib/django/tests/modeltests/custom_columns/models.py
thirdparty/google_appengine/lib/django/tests/modeltests/custom_managers/__init__.py
thirdparty/google_appengine/lib/django/tests/modeltests/custom_managers/models.py
thirdparty/google_appengine/lib/django/tests/modeltests/custom_methods/__init__.py
thirdparty/google_appengine/lib/django/tests/modeltests/custom_methods/models.py
thirdparty/google_appengine/lib/django/tests/modeltests/custom_pk/__init__.py
thirdparty/google_appengine/lib/django/tests/modeltests/custom_pk/models.py
thirdparty/google_appengine/lib/django/tests/modeltests/empty/__init__.py
thirdparty/google_appengine/lib/django/tests/modeltests/empty/models.py
thirdparty/google_appengine/lib/django/tests/modeltests/field_defaults/__init__.py
thirdparty/google_appengine/lib/django/tests/modeltests/field_defaults/models.py
thirdparty/google_appengine/lib/django/tests/modeltests/fixtures/__init__.py
thirdparty/google_appengine/lib/django/tests/modeltests/fixtures/fixtures/fixture1.json
thirdparty/google_appengine/lib/django/tests/modeltests/fixtures/fixtures/fixture2.json
thirdparty/google_appengine/lib/django/tests/modeltests/fixtures/fixtures/fixture2.xml
thirdparty/google_appengine/lib/django/tests/modeltests/fixtures/fixtures/fixture3.xml
thirdparty/google_appengine/lib/django/tests/modeltests/fixtures/fixtures/initial_data.json
thirdparty/google_appengine/lib/django/tests/modeltests/fixtures/models.py
thirdparty/google_appengine/lib/django/tests/modeltests/generic_relations/__init__.py
thirdparty/google_appengine/lib/django/tests/modeltests/generic_relations/models.py
thirdparty/google_appengine/lib/django/tests/modeltests/get_latest/__init__.py
thirdparty/google_appengine/lib/django/tests/modeltests/get_latest/models.py
thirdparty/google_appengine/lib/django/tests/modeltests/get_object_or_404/__init__.py
thirdparty/google_appengine/lib/django/tests/modeltests/get_object_or_404/models.py
thirdparty/google_appengine/lib/django/tests/modeltests/get_or_create/__init__.py
thirdparty/google_appengine/lib/django/tests/modeltests/get_or_create/models.py
thirdparty/google_appengine/lib/django/tests/modeltests/invalid_models/__init__.py
thirdparty/google_appengine/lib/django/tests/modeltests/invalid_models/models.py
thirdparty/google_appengine/lib/django/tests/modeltests/lookup/__init__.py
thirdparty/google_appengine/lib/django/tests/modeltests/lookup/models.py
thirdparty/google_appengine/lib/django/tests/modeltests/m2m_and_m2o/__init__.py
thirdparty/google_appengine/lib/django/tests/modeltests/m2m_and_m2o/models.py
thirdparty/google_appengine/lib/django/tests/modeltests/m2m_intermediary/__init__.py
thirdparty/google_appengine/lib/django/tests/modeltests/m2m_intermediary/models.py
thirdparty/google_appengine/lib/django/tests/modeltests/m2m_multiple/__init__.py
thirdparty/google_appengine/lib/django/tests/modeltests/m2m_multiple/models.py
thirdparty/google_appengine/lib/django/tests/modeltests/m2m_recursive/__init__.py
thirdparty/google_appengine/lib/django/tests/modeltests/m2m_recursive/models.py
thirdparty/google_appengine/lib/django/tests/modeltests/m2o_recursive/__init__.py
thirdparty/google_appengine/lib/django/tests/modeltests/m2o_recursive/models.py
thirdparty/google_appengine/lib/django/tests/modeltests/m2o_recursive2/__init__.py
thirdparty/google_appengine/lib/django/tests/modeltests/m2o_recursive2/models.py
thirdparty/google_appengine/lib/django/tests/modeltests/manipulators/__init__.py
thirdparty/google_appengine/lib/django/tests/modeltests/manipulators/models.py
thirdparty/google_appengine/lib/django/tests/modeltests/many_to_many/__init__.py
thirdparty/google_appengine/lib/django/tests/modeltests/many_to_many/models.py
thirdparty/google_appengine/lib/django/tests/modeltests/many_to_one/__init__.py
thirdparty/google_appengine/lib/django/tests/modeltests/many_to_one/models.py
thirdparty/google_appengine/lib/django/tests/modeltests/many_to_one_null/__init__.py
thirdparty/google_appengine/lib/django/tests/modeltests/many_to_one_null/models.py
thirdparty/google_appengine/lib/django/tests/modeltests/model_forms/__init__.py
thirdparty/google_appengine/lib/django/tests/modeltests/model_forms/models.py
thirdparty/google_appengine/lib/django/tests/modeltests/model_inheritance/__init__.py
thirdparty/google_appengine/lib/django/tests/modeltests/model_inheritance/models.py
thirdparty/google_appengine/lib/django/tests/modeltests/mutually_referential/__init__.py
thirdparty/google_appengine/lib/django/tests/modeltests/mutually_referential/models.py
thirdparty/google_appengine/lib/django/tests/modeltests/one_to_one/__init__.py
thirdparty/google_appengine/lib/django/tests/modeltests/one_to_one/models.py
thirdparty/google_appengine/lib/django/tests/modeltests/or_lookups/__init__.py
thirdparty/google_appengine/lib/django/tests/modeltests/or_lookups/models.py
thirdparty/google_appengine/lib/django/tests/modeltests/ordering/__init__.py
thirdparty/google_appengine/lib/django/tests/modeltests/ordering/models.py
thirdparty/google_appengine/lib/django/tests/modeltests/pagination/__init__.py
thirdparty/google_appengine/lib/django/tests/modeltests/pagination/models.py
thirdparty/google_appengine/lib/django/tests/modeltests/properties/__init__.py
thirdparty/google_appengine/lib/django/tests/modeltests/properties/models.py
thirdparty/google_appengine/lib/django/tests/modeltests/reserved_names/__init__.py
thirdparty/google_appengine/lib/django/tests/modeltests/reserved_names/models.py
thirdparty/google_appengine/lib/django/tests/modeltests/reverse_lookup/__init__.py
thirdparty/google_appengine/lib/django/tests/modeltests/reverse_lookup/models.py
thirdparty/google_appengine/lib/django/tests/modeltests/save_delete_hooks/__init__.py
thirdparty/google_appengine/lib/django/tests/modeltests/save_delete_hooks/models.py
thirdparty/google_appengine/lib/django/tests/modeltests/select_related/__init__.py
thirdparty/google_appengine/lib/django/tests/modeltests/select_related/models.py
thirdparty/google_appengine/lib/django/tests/modeltests/serializers/__init__.py
thirdparty/google_appengine/lib/django/tests/modeltests/serializers/models.py
thirdparty/google_appengine/lib/django/tests/modeltests/str/__init__.py
thirdparty/google_appengine/lib/django/tests/modeltests/str/models.py
thirdparty/google_appengine/lib/django/tests/modeltests/test_client/__init__.py
thirdparty/google_appengine/lib/django/tests/modeltests/test_client/fixtures/testdata.json
thirdparty/google_appengine/lib/django/tests/modeltests/test_client/models.py
thirdparty/google_appengine/lib/django/tests/modeltests/test_client/urls.py
thirdparty/google_appengine/lib/django/tests/modeltests/test_client/views.py
thirdparty/google_appengine/lib/django/tests/modeltests/transactions/__init__.py
thirdparty/google_appengine/lib/django/tests/modeltests/transactions/models.py
thirdparty/google_appengine/lib/django/tests/modeltests/validation/__init__.py
thirdparty/google_appengine/lib/django/tests/modeltests/validation/models.py
thirdparty/google_appengine/lib/django/tests/regressiontests/__init__.py
thirdparty/google_appengine/lib/django/tests/regressiontests/bug639/__init__.py
thirdparty/google_appengine/lib/django/tests/regressiontests/bug639/models.py
thirdparty/google_appengine/lib/django/tests/regressiontests/bug639/test.jpg
thirdparty/google_appengine/lib/django/tests/regressiontests/bug639/tests.py
thirdparty/google_appengine/lib/django/tests/regressiontests/cache/__init__.py
thirdparty/google_appengine/lib/django/tests/regressiontests/cache/models.py
thirdparty/google_appengine/lib/django/tests/regressiontests/cache/tests.py
thirdparty/google_appengine/lib/django/tests/regressiontests/datastructures/__init__.py
thirdparty/google_appengine/lib/django/tests/regressiontests/datastructures/models.py
thirdparty/google_appengine/lib/django/tests/regressiontests/datastructures/tests.py
thirdparty/google_appengine/lib/django/tests/regressiontests/dateformat/__init__.py
thirdparty/google_appengine/lib/django/tests/regressiontests/dateformat/models.py
thirdparty/google_appengine/lib/django/tests/regressiontests/dateformat/tests.py
thirdparty/google_appengine/lib/django/tests/regressiontests/db_typecasts/__init__.py
thirdparty/google_appengine/lib/django/tests/regressiontests/db_typecasts/models.py
thirdparty/google_appengine/lib/django/tests/regressiontests/db_typecasts/tests.py
thirdparty/google_appengine/lib/django/tests/regressiontests/defaultfilters/__init__.py
thirdparty/google_appengine/lib/django/tests/regressiontests/defaultfilters/models.py
thirdparty/google_appengine/lib/django/tests/regressiontests/defaultfilters/tests.py
thirdparty/google_appengine/lib/django/tests/regressiontests/dispatch/__init__.py
thirdparty/google_appengine/lib/django/tests/regressiontests/dispatch/models.py
thirdparty/google_appengine/lib/django/tests/regressiontests/dispatch/tests/__init__.py
thirdparty/google_appengine/lib/django/tests/regressiontests/dispatch/tests/test_dispatcher.py
thirdparty/google_appengine/lib/django/tests/regressiontests/dispatch/tests/test_robustapply.py
thirdparty/google_appengine/lib/django/tests/regressiontests/dispatch/tests/test_saferef.py
thirdparty/google_appengine/lib/django/tests/regressiontests/forms/__init__.py
thirdparty/google_appengine/lib/django/tests/regressiontests/forms/models.py
thirdparty/google_appengine/lib/django/tests/regressiontests/forms/tests.py
thirdparty/google_appengine/lib/django/tests/regressiontests/httpwrappers/__init__.py
thirdparty/google_appengine/lib/django/tests/regressiontests/httpwrappers/models.py
thirdparty/google_appengine/lib/django/tests/regressiontests/httpwrappers/tests.py
thirdparty/google_appengine/lib/django/tests/regressiontests/humanize/__init__.py
thirdparty/google_appengine/lib/django/tests/regressiontests/humanize/models.py
thirdparty/google_appengine/lib/django/tests/regressiontests/humanize/tests.py
thirdparty/google_appengine/lib/django/tests/regressiontests/initial_sql_regress/__init__.py
thirdparty/google_appengine/lib/django/tests/regressiontests/initial_sql_regress/models.py
thirdparty/google_appengine/lib/django/tests/regressiontests/initial_sql_regress/sql/simple.sql
thirdparty/google_appengine/lib/django/tests/regressiontests/invalid_admin_options/__init__.py
thirdparty/google_appengine/lib/django/tests/regressiontests/invalid_admin_options/models.py
thirdparty/google_appengine/lib/django/tests/regressiontests/many_to_one_regress/__init__.py
thirdparty/google_appengine/lib/django/tests/regressiontests/many_to_one_regress/models.py
thirdparty/google_appengine/lib/django/tests/regressiontests/markup/__init__.py
thirdparty/google_appengine/lib/django/tests/regressiontests/markup/models.py
thirdparty/google_appengine/lib/django/tests/regressiontests/markup/tests.py
thirdparty/google_appengine/lib/django/tests/regressiontests/null_queries/__init__.py
thirdparty/google_appengine/lib/django/tests/regressiontests/null_queries/models.py
thirdparty/google_appengine/lib/django/tests/regressiontests/one_to_one_regress/__init__.py
thirdparty/google_appengine/lib/django/tests/regressiontests/one_to_one_regress/models.py
thirdparty/google_appengine/lib/django/tests/regressiontests/serializers_regress/__init__.py
thirdparty/google_appengine/lib/django/tests/regressiontests/serializers_regress/models.py
thirdparty/google_appengine/lib/django/tests/regressiontests/serializers_regress/tests.py
thirdparty/google_appengine/lib/django/tests/regressiontests/string_lookup/__init__.py
thirdparty/google_appengine/lib/django/tests/regressiontests/string_lookup/models.py
thirdparty/google_appengine/lib/django/tests/regressiontests/templates/__init__.py
thirdparty/google_appengine/lib/django/tests/regressiontests/templates/models.py
thirdparty/google_appengine/lib/django/tests/regressiontests/templates/tests.py
thirdparty/google_appengine/lib/django/tests/regressiontests/templates/urls.py
thirdparty/google_appengine/lib/django/tests/regressiontests/templates/views.py
thirdparty/google_appengine/lib/django/tests/regressiontests/urlpatterns_reverse/__init__.py
thirdparty/google_appengine/lib/django/tests/regressiontests/urlpatterns_reverse/models.py
thirdparty/google_appengine/lib/django/tests/regressiontests/urlpatterns_reverse/tests.py
thirdparty/google_appengine/lib/django/tests/runtests.py
thirdparty/google_appengine/lib/django/tests/templates/404.html
thirdparty/google_appengine/lib/django/tests/templates/500.html
thirdparty/google_appengine/lib/django/tests/templates/login.html
thirdparty/google_appengine/lib/django/tests/urls.py
--- a/.hgtags	Mon Sep 07 20:26:39 2009 +0200
+++ b/.hgtags	Mon Sep 07 20:27:37 2009 +0200
@@ -15,3 +15,4 @@
 bb96d2c3885e9f1413e02a285751578c3c14416a v0-5-20090814
 4027acdbf91d91111ba45e06848ee58554be0d9c v0-5-20090814p1
 776aae4d0499e69d73b5e0c30e04cbd6dcafc7ec v0-5-20090825
+9f7f269383f7dbdb06c0ec94e5cab3c07514c957 v0-5-20090906
--- a/app/app.yaml.template	Mon Sep 07 20:26:39 2009 +0200
+++ b/app/app.yaml.template	Mon Sep 07 20:27:37 2009 +0200
@@ -15,7 +15,7 @@
 # TODO(proto): uncomment and supply a Google App Engine application instance
 # application: FIXME
 # TODO(release): see the instructions in README about the "version:" field
-version: 0-5-20090825
+version: 0-5-20090906
 runtime: python
 api_version: 1
 
--- a/scripts/build.sh	Mon Sep 07 20:26:39 2009 +0200
+++ b/scripts/build.sh	Mon Sep 07 20:27:37 2009 +0200
@@ -89,7 +89,14 @@
 # Create symbolic links.
 for x in $APP_FILES $APP_DIRS $ZIP_FILES
 do
-    ln -s $APP_FOLDER/$x $APP_BUILD/$x
+    if [[ $x != "soc" && $x != "jquery" && $x != "json" ]] ; then
+      ln -s $APP_FOLDER/$x $APP_BUILD/$x
+    else
+      cp -R $APP_FOLDER/$x $APP_BUILD/$x
+    fi
 done
 
+# Run shrinksafe
+bash ../scripts/shrinksafe.sh $APP_BUILD/soc/content/js $APP_BUILD/jquery $APP_BUILD/json
+
 echo "Build results in $APP_BUILD."
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/scripts/shrinksafe.sh	Mon Sep 07 20:27:37 2009 +0200
@@ -0,0 +1,38 @@
+#!/bin/bash
+#Requires java installed
+
+echo "*** SHRINKSAFE: running shrinksafe ***"
+
+SHRINKSAFE="../thirdparty/shrinksafe/shrinksafe.jar"
+
+echo "*** SHRINKSAFE: minifying javascript files ***"
+let SOURCE_FILE_SIZES=0
+let DEST_FILE_SIZES=0
+
+shrinksafe () {
+  SOURCE_DIR=$1
+  for dir in $(find $SOURCE_DIR -type d); do
+    for i in $(find $dir/*.js -type f); do
+      echo "SHRINKSAFE: Processing $i"
+      CURRENT_SOURCE_FILE_SIZE=$(stat -c%s "$i")
+      let SOURCE_FILE_SIZES=$SOURCE_FILE_SIZES+$CURRENT_SOURCE_FILE_SIZE
+      mv $i $i.old.js
+      java -jar $SHRINKSAFE $i.old.js > $i
+      if [ "$?" == "1" ]; then
+        echo "*** ATTENTION ***: $i minimization failed, copying plain file"
+        cp $i.old.js $i
+      fi
+      rm $i.old.js
+      CURRENT_DEST_FILE_SIZE=$(stat -c%s "$i")
+      let DEST_FILE_SIZES=$DEST_FILE_SIZES+$CURRENT_DEST_FILE_SIZE
+    done
+  done
+}
+
+for DEST_DIR in "$@"; do
+  shrinksafe $DEST_DIR
+done
+
+let COMPRESSION_RATE=$DEST_FILE_SIZES*100/$SOURCE_FILE_SIZES
+echo "*** SHRINKSAFE: Source file sizes: $SOURCE_FILE_SIZES, Dest file sizes: $DEST_FILE_SIZES"
+echo "*** SHRINKSAFE: Congratulations! You achieved $COMPRESSION_RATE% compression rate!"
--- a/thirdparty/google_appengine/RELEASE_NOTES	Mon Sep 07 20:26:39 2009 +0200
+++ b/thirdparty/google_appengine/RELEASE_NOTES	Mon Sep 07 20:27:37 2009 +0200
@@ -3,6 +3,48 @@
 
 App Engine Python SDK - Release Notes
 
+Version 1.2.5 - August 13, 2009
+===============================
+  - The Windows Python SDK now includes a GUI launcher, similar to the Mac SDK.
+  - Added XMPP support.
+    http://code.google.com/appengine/docs/python/xmpp
+    http://code.google.com/p/googleappengine/issues/detail?id=231
+  - Datastore now supports multiple writes to the same entity within a 
+    transaction.
+  - Datastore entity key names can now start with a digit.
+      http://code.google.com/p/googleappengine/issues/detail?id=1352
+  - Datastore now supports ancestor + kind queries without a composite index
+      http://code.google.com/p/googleappengine/issues/detail?id=1003
+  - Bulkloader now supports configurationless dump and restore with new
+    --dump and --restore options.
+  - Bulkloader now supports a --dry_run flag to testing data prior to uploading.
+  - Appcfg.py now allows specifying any end date for request_logs.
+  - Urlfetch now allows setting the Referer header.
+      http://code.google.com/p/googleappengine/issues/detail?id=445
+  - Urlfetch stub now correctly handles HEAD requests.
+      http://code.google.com/p/googleappengine/issues/detail?id=866
+  - New remote_api_shell tool for interactive remote_api operations.
+  - New google.ext.ereporter module to collect and email exception reports.
+  - New google.ext.deferred module to execute ad-hoc tasks on the Task Queue.
+
+Version 1.2.4 - July 16, 2009
+=============================
+  - Added support for kindless queries, ie. transaction descendant queries.
+      http://code.google.com/p/googleappengine/issues/detail?id=913
+  - Composite indexes no longer required for certain types of key queries.
+  - Improved exception reporting in the bulkloader.
+  - Datastore transaction RPC sent at beginning of transaction rather than
+    upon first Datastore request.
+  - PolyModel supports keys_only query.
+      http://code.google.com/p/googleappengine/issues/detail?id=1630
+  - Remote API supports more API's (Images, Memcache and URLFetch).
+      http://code.google.com/p/googleappengine/issues/detail?id=1596
+  - Remote API shell.
+  - Support for multiple inheritance for Model and PolyModel.
+  - Enhancement to SearchableModel allowing multiple properties to be
+    indexed.
+  - Various code quality improvements.
+
 Version 1.2.3 - June 1, 2009
 ============================
 
--- a/thirdparty/google_appengine/VERSION	Mon Sep 07 20:26:39 2009 +0200
+++ b/thirdparty/google_appengine/VERSION	Mon Sep 07 20:27:37 2009 +0200
@@ -1,3 +1,3 @@
-release: "1.2.3"
-timestamp: 1243913623
+release: "1.2.5"
+timestamp: 1250206498
 api_versions: ['1']
--- a/thirdparty/google_appengine/google/appengine/api/api_base_pb.py	Mon Sep 07 20:26:39 2009 +0200
+++ b/thirdparty/google_appengine/google/appengine/api/api_base_pb.py	Mon Sep 07 20:27:37 2009 +0200
@@ -88,18 +88,21 @@
     if self.has_value_: res+=prefix+("value: %s\n" % self.DebugFormatString(self.value_))
     return res
 
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
   kvalue = 1
 
-  _TEXT = (
-   "ErrorCode",
-   "value",
-  )
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    1: "value",
+  }, 1)
 
-  _TYPES = (
-   ProtocolBuffer.Encoder.NUMERIC,
-   ProtocolBuffer.Encoder.STRING,
-
-  )
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    1: ProtocolBuffer.Encoder.STRING,
+  }, 1, ProtocolBuffer.Encoder.MAX_TYPE)
 
   _STYLE = """"""
   _STYLE_CONTENT_TYPE = """"""
@@ -169,18 +172,21 @@
     if self.has_value_: res+=prefix+("value: %s\n" % self.DebugFormatInt32(self.value_))
     return res
 
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
   kvalue = 1
 
-  _TEXT = (
-   "ErrorCode",
-   "value",
-  )
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    1: "value",
+  }, 1)
 
-  _TYPES = (
-   ProtocolBuffer.Encoder.NUMERIC,
-   ProtocolBuffer.Encoder.NUMERIC,
-
-  )
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    1: ProtocolBuffer.Encoder.NUMERIC,
+  }, 1, ProtocolBuffer.Encoder.MAX_TYPE)
 
   _STYLE = """"""
   _STYLE_CONTENT_TYPE = """"""
@@ -250,18 +256,21 @@
     if self.has_value_: res+=prefix+("value: %s\n" % self.DebugFormatInt64(self.value_))
     return res
 
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
   kvalue = 1
 
-  _TEXT = (
-   "ErrorCode",
-   "value",
-  )
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    1: "value",
+  }, 1)
 
-  _TYPES = (
-   ProtocolBuffer.Encoder.NUMERIC,
-   ProtocolBuffer.Encoder.NUMERIC,
-
-  )
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    1: ProtocolBuffer.Encoder.NUMERIC,
+  }, 1, ProtocolBuffer.Encoder.MAX_TYPE)
 
   _STYLE = """"""
   _STYLE_CONTENT_TYPE = """"""
@@ -330,18 +339,21 @@
     if self.has_value_: res+=prefix+("value: %s\n" % self.DebugFormatBool(self.value_))
     return res
 
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
   kvalue = 1
 
-  _TEXT = (
-   "ErrorCode",
-   "value",
-  )
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    1: "value",
+  }, 1)
 
-  _TYPES = (
-   ProtocolBuffer.Encoder.NUMERIC,
-   ProtocolBuffer.Encoder.NUMERIC,
-
-  )
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    1: ProtocolBuffer.Encoder.NUMERIC,
+  }, 1, ProtocolBuffer.Encoder.MAX_TYPE)
 
   _STYLE = """"""
   _STYLE_CONTENT_TYPE = """"""
@@ -410,18 +422,105 @@
     if self.has_value_: res+=prefix+("value: %s\n" % self.DebugFormat(self.value_))
     return res
 
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
   kvalue = 1
 
-  _TEXT = (
-   "ErrorCode",
-   "value",
-  )
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    1: "value",
+  }, 1)
+
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    1: ProtocolBuffer.Encoder.DOUBLE,
+  }, 1, ProtocolBuffer.Encoder.MAX_TYPE)
+
+  _STYLE = """"""
+  _STYLE_CONTENT_TYPE = """"""
+class BytesProto(ProtocolBuffer.ProtocolMessage):
+  has_value_ = 0
+  value_ = ""
+
+  def __init__(self, contents=None):
+    if contents is not None: self.MergeFromString(contents)
+
+  def value(self): return self.value_
+
+  def set_value(self, x):
+    self.has_value_ = 1
+    self.value_ = x
+
+  def clear_value(self):
+    if self.has_value_:
+      self.has_value_ = 0
+      self.value_ = ""
+
+  def has_value(self): return self.has_value_
+
+
+  def MergeFrom(self, x):
+    assert x is not self
+    if (x.has_value()): self.set_value(x.value())
+
+  def Equals(self, x):
+    if x is self: return 1
+    if self.has_value_ != x.has_value_: return 0
+    if self.has_value_ and self.value_ != x.value_: return 0
+    return 1
 
-  _TYPES = (
-   ProtocolBuffer.Encoder.NUMERIC,
-   ProtocolBuffer.Encoder.DOUBLE,
+  def IsInitialized(self, debug_strs=None):
+    initialized = 1
+    if (not self.has_value_):
+      initialized = 0
+      if debug_strs is not None:
+        debug_strs.append('Required field: value not set.')
+    return initialized
+
+  def ByteSize(self):
+    n = 0
+    n += self.lengthString(len(self.value_))
+    return n + 1
+
+  def Clear(self):
+    self.clear_value()
+
+  def OutputUnchecked(self, out):
+    out.putVarInt32(10)
+    out.putPrefixedString(self.value_)
 
-  )
+  def TryMerge(self, d):
+    while d.avail() > 0:
+      tt = d.getVarInt32()
+      if tt == 10:
+        self.set_value(d.getPrefixedString())
+        continue
+      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+      d.skipData(tt)
+
+
+  def __str__(self, prefix="", printElemNumber=0):
+    res=""
+    if self.has_value_: res+=prefix+("value: %s\n" % self.DebugFormatString(self.value_))
+    return res
+
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+  kvalue = 1
+
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    1: "value",
+  }, 1)
+
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    1: ProtocolBuffer.Encoder.STRING,
+  }, 1, ProtocolBuffer.Encoder.MAX_TYPE)
 
   _STYLE = """"""
   _STYLE_CONTENT_TYPE = """"""
@@ -465,15 +564,19 @@
     return res
 
 
-  _TEXT = (
-   "ErrorCode",
-  )
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
 
-  _TYPES = (
-   ProtocolBuffer.Encoder.NUMERIC,
-  )
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+  }, 0)
+
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+  }, 0, ProtocolBuffer.Encoder.MAX_TYPE)
 
   _STYLE = """"""
   _STYLE_CONTENT_TYPE = """"""
 
-__all__ = ['StringProto','Integer32Proto','Integer64Proto','BoolProto','DoubleProto','VoidProto']
+__all__ = ['StringProto','Integer32Proto','Integer64Proto','BoolProto','DoubleProto','BytesProto','VoidProto']
--- a/thirdparty/google_appengine/google/appengine/api/apiproxy_rpc.py	Mon Sep 07 20:26:39 2009 +0200
+++ b/thirdparty/google_appengine/google/appengine/api/apiproxy_rpc.py	Mon Sep 07 20:27:37 2009 +0200
@@ -64,6 +64,7 @@
     self.callback = callback
     self.deadline = deadline
     self.stub = stub
+    self.cpu_usage_mcycles = 0
 
   def MakeCall(self, package=None, call=None, request=None, response=None,
                callback=None, deadline=None):
--- a/thirdparty/google_appengine/google/appengine/api/apiproxy_stub_map.py	Mon Sep 07 20:26:39 2009 +0200
+++ b/thirdparty/google_appengine/google/appengine/api/apiproxy_stub_map.py	Mon Sep 07 20:27:37 2009 +0200
@@ -111,7 +111,10 @@
     unique_key = (key, inspect.getmodule(function))
     if unique_key in self.__unique_keys:
       return False
-    self.__content.insert(index, (key, function, service))
+    num_args = len(inspect.getargspec(function)[0])
+    if (inspect.ismethod(function)):
+      num_args -= 1
+    self.__content.insert(index, (key, function, service, num_args))
     self.__unique_keys.add(unique_key)
     return True
 
@@ -150,7 +153,7 @@
     self.__content = []
     self.__unique_keys = set()
 
-  def Call(self, service, call, request, response):
+  def Call(self, service, call, request, response, rpc=None):
     """Invokes all hooks in this collection.
 
     Args:
@@ -158,10 +161,14 @@
       call: string representing which function to call
       request: protocol buffer for the request
       response: protocol buffer for the response
+      rpc: optional RPC used to make this call
     """
-    for key, function, srv in self.__content:
+    for key, function, srv, num_args in self.__content:
       if srv is None or srv == service:
-        function(service, call, request, response)
+        if num_args == 5:
+          function(service, call, request, response, rpc)
+        else:
+          function(service, call, request, response)
 
 
 class APIProxyStubMap(object):
@@ -240,9 +247,17 @@
     """
     stub = self.GetStub(service)
     assert stub, 'No api proxy found for service "%s"' % service
-    self.__precall_hooks.Call(service, call, request, response)
-    stub.MakeSyncCall(service, call, request, response)
-    self.__postcall_hooks.Call(service, call, request, response)
+    if hasattr(stub, 'CreateRPC'):
+      rpc = stub.CreateRPC()
+      self.__precall_hooks.Call(service, call, request, response, rpc)
+      rpc.MakeCall(service, call, request, response)
+      rpc.Wait()
+      rpc.CheckSuccess()
+      self.__postcall_hooks.Call(service, call, request, response, rpc)
+    else:
+      self.__precall_hooks.Call(service, call, request, response)
+      stub.MakeSyncCall(service, call, request, response)
+      self.__postcall_hooks.Call(service, call, request, response)
 
 
 class UserRPC(object):
@@ -385,7 +400,8 @@
     self.__method = method
     self.__get_result_hook = get_result_hook
     self.__user_data = user_data
-    apiproxy.GetPreCallHooks().Call(self.__service, method, request, response)
+    apiproxy.GetPreCallHooks().Call(
+        self.__service, method, request, response, self.__rpc)
     self.__rpc.MakeCall(self.__service, method, request, response)
 
   def wait(self):
@@ -424,7 +440,7 @@
     if not self.__postcall_hooks_called:
       self.__postcall_hooks_called = True
       apiproxy.GetPostCallHooks().Call(self.__service, self.__method,
-                                       self.request, self.response)
+                                       self.request, self.response, self.__rpc)
 
   def get_result(self):
     """Get the result of the RPC, or possibly raise an exception.
--- a/thirdparty/google_appengine/google/appengine/api/app_logging.py	Mon Sep 07 20:26:39 2009 +0200
+++ b/thirdparty/google_appengine/google/appengine/api/app_logging.py	Mon Sep 07 20:27:37 2009 +0200
@@ -68,7 +68,9 @@
     StreamHandler.emit()."""
     try:
       message = self._AppLogsMessage(record)
-      self.stream.write(message.encode("UTF-8"))
+      if isinstance(message, unicode):
+        message = message.encode("UTF-8")
+      self.stream.write(message)
       self.flush()
     except (KeyboardInterrupt, SystemExit):
       raise
--- a/thirdparty/google_appengine/google/appengine/api/appinfo.py	Mon Sep 07 20:26:39 2009 +0200
+++ b/thirdparty/google_appengine/google/appengine/api/appinfo.py	Mon Sep 07 20:27:37 2009 +0200
@@ -15,7 +15,7 @@
 # limitations under the License.
 #
 
-"""AppInfo tools
+"""AppInfo tools.
 
 Library for working with AppInfo records in memory, store and load from
 configuration files.
@@ -29,8 +29,8 @@
 
 from google.appengine.api import appinfo_errors
 from google.appengine.api import validation
+from google.appengine.api import yaml_builder
 from google.appengine.api import yaml_listener
-from google.appengine.api import yaml_builder
 from google.appengine.api import yaml_object
 
 
@@ -40,11 +40,13 @@
 _DELTA_REGEX = r'([1-9][0-9]*)([DdHhMm]|[sS]?)'
 _EXPIRATION_REGEX = r'\s*(%s)(\s+%s)*\s*' % (_DELTA_REGEX, _DELTA_REGEX)
 
+_SERVICE_RE_STRING = r'(mail|xmpp_message)'
+
 _EXPIRATION_CONVERSIONS = {
-  'd': 60 * 60 * 24,
-  'h': 60 * 60,
-  'm': 60,
-  's': 1,
+    'd': 60 * 60 * 24,
+    'h': 60 * 60,
+    'm': 60,
+    's': 1,
 }
 
 APP_ID_MAX_LEN = 100
@@ -72,17 +74,17 @@
 
 REQUIRE_MATCHING_FILE = 'require_matching_file'
 
-DEFAULT_SKIP_FILES = (r"^(.*/)?("
-                      r"(app\.yaml)|"
-                      r"(app\.yml)|"
-                      r"(index\.yaml)|"
-                      r"(index\.yml)|"
-                      r"(#.*#)|"
-                      r"(.*~)|"
-                      r"(.*\.py[co])|"
-                      r"(.*/RCS/.*)|"
-                      r"(\..*)|"
-                      r")$")
+DEFAULT_SKIP_FILES = (r'^(.*/)?('
+                      r'(app\.yaml)|'
+                      r'(app\.yml)|'
+                      r'(index\.yaml)|'
+                      r'(index\.yml)|'
+                      r'(#.*#)|'
+                      r'(.*~)|'
+                      r'(.*\.py[co])|'
+                      r'(.*/RCS/.*)|'
+                      r'(\..*)|'
+                      r')$')
 
 LOGIN = 'login'
 SECURE = 'secure'
@@ -101,6 +103,7 @@
 HANDLERS = 'handlers'
 DEFAULT_EXPIRATION = 'default_expiration'
 SKIP_FILES = 'skip_files'
+SERVICES = 'inbound_services'
 
 
 class URLMap(validation.Validated):
@@ -176,42 +179,42 @@
 
   ATTRIBUTES = {
 
-    URL: validation.Optional(_URL_REGEX),
-    LOGIN: validation.Options(LOGIN_OPTIONAL,
-                              LOGIN_REQUIRED,
-                              LOGIN_ADMIN,
-                              default=LOGIN_OPTIONAL),
+      URL: validation.Optional(_URL_REGEX),
+      LOGIN: validation.Options(LOGIN_OPTIONAL,
+                                LOGIN_REQUIRED,
+                                LOGIN_ADMIN,
+                                default=LOGIN_OPTIONAL),
 
-    SECURE: validation.Options(SECURE_HTTP,
-                               SECURE_HTTPS,
-                               SECURE_HTTP_OR_HTTPS,
-                               default=SECURE_HTTP),
+      SECURE: validation.Options(SECURE_HTTP,
+                                 SECURE_HTTPS,
+                                 SECURE_HTTP_OR_HTTPS,
+                                 default=SECURE_HTTP),
 
 
 
-    HANDLER_STATIC_FILES: validation.Optional(_FILES_REGEX),
-    UPLOAD: validation.Optional(_FILES_REGEX),
+      HANDLER_STATIC_FILES: validation.Optional(_FILES_REGEX),
+      UPLOAD: validation.Optional(_FILES_REGEX),
 
 
-    HANDLER_STATIC_DIR: validation.Optional(_FILES_REGEX),
+      HANDLER_STATIC_DIR: validation.Optional(_FILES_REGEX),
 
 
-    MIME_TYPE: validation.Optional(str),
-    EXPIRATION: validation.Optional(_EXPIRATION_REGEX),
+      MIME_TYPE: validation.Optional(str),
+      EXPIRATION: validation.Optional(_EXPIRATION_REGEX),
 
 
-    HANDLER_SCRIPT: validation.Optional(_FILES_REGEX),
+      HANDLER_SCRIPT: validation.Optional(_FILES_REGEX),
 
-    REQUIRE_MATCHING_FILE: validation.Optional(bool),
+      REQUIRE_MATCHING_FILE: validation.Optional(bool),
   }
 
   COMMON_FIELDS = set([URL, LOGIN, SECURE])
 
   ALLOWED_FIELDS = {
-    HANDLER_STATIC_FILES: (MIME_TYPE, UPLOAD, EXPIRATION,
-                           REQUIRE_MATCHING_FILE),
-    HANDLER_STATIC_DIR: (MIME_TYPE, EXPIRATION, REQUIRE_MATCHING_FILE),
-    HANDLER_SCRIPT: (),
+      HANDLER_STATIC_FILES: (MIME_TYPE, UPLOAD, EXPIRATION,
+                             REQUIRE_MATCHING_FILE),
+      HANDLER_STATIC_DIR: (MIME_TYPE, EXPIRATION, REQUIRE_MATCHING_FILE),
+      HANDLER_SCRIPT: (),
   }
 
   def GetHandler(self):
@@ -253,9 +256,9 @@
           not (attribute in allowed_fields or
                attribute in URLMap.COMMON_FIELDS or
                attribute == mapping_type)):
-            raise appinfo_errors.UnexpectedHandlerAttribute(
-                'Unexpected attribute "%s" for mapping type %s.' %
-                (attribute, mapping_type))
+        raise appinfo_errors.UnexpectedHandlerAttribute(
+            'Unexpected attribute "%s" for mapping type %s.' %
+            (attribute, mapping_type))
 
     if mapping_type == HANDLER_STATIC_FILES and not self.upload:
       raise appinfo_errors.MissingHandlerAttribute(
@@ -309,15 +312,18 @@
   ATTRIBUTES = {
 
 
-    APPLICATION: APPLICATION_RE_STRING,
-    VERSION: VERSION_RE_STRING,
-    RUNTIME: RUNTIME_RE_STRING,
+      APPLICATION: APPLICATION_RE_STRING,
+      VERSION: VERSION_RE_STRING,
+      RUNTIME: RUNTIME_RE_STRING,
 
 
-    API_VERSION: API_VERSION_RE_STRING,
-    HANDLERS: validation.Optional(validation.Repeated(URLMap)),
-    DEFAULT_EXPIRATION: validation.Optional(_EXPIRATION_REGEX),
-    SKIP_FILES: validation.RegexStr(default=DEFAULT_SKIP_FILES)
+      API_VERSION: API_VERSION_RE_STRING,
+      HANDLERS: validation.Optional(validation.Repeated(URLMap)),
+
+      SERVICES: validation.Optional(validation.Repeated(
+          validation.Regex(_SERVICE_RE_STRING))),
+      DEFAULT_EXPIRATION: validation.Optional(_EXPIRATION_REGEX),
+      SKIP_FILES: validation.RegexStr(default=DEFAULT_SKIP_FILES)
   }
 
   def CheckInitialized(self):
@@ -349,8 +355,9 @@
     An instance of AppInfoExternal as loaded from a YAML file.
 
   Raises:
-    EmptyConfigurationFile when there are no documents in YAML file.
-    MultipleConfigurationFile when there is more than one document in YAML
+    ValueError: if a specified service is not valid.
+    EmptyConfigurationFile: when there are no documents in YAML file.
+    MultipleConfigurationFile: when there is more than one document in YAML
     file.
   """
   builder = yaml_object.ObjectBuilder(AppInfoExternal)
@@ -386,7 +393,7 @@
 
 _file_path_positive_re = re.compile(r'^[ 0-9a-zA-Z\._\+/\$-]{1,256}$')
 
-_file_path_negative_1_re = re.compile(r'\.\.|^\./|\.$|/\./|^-')
+_file_path_negative_1_re = re.compile(r'\.\.|^\./|\.$|/\./|^-|^_ah/')
 
 _file_path_negative_2_re = re.compile(r'//|/$')
 
@@ -413,7 +420,8 @@
   if _file_path_positive_re.match(filename) is None:
     return 'Invalid character in filename: %s' % filename
   if _file_path_negative_1_re.search(filename) is not None:
-    return ('Filename cannot contain "." or ".." or start with "-": %s' %
+    return ('Filename cannot contain "." or ".." '
+            'or start with "-" or "_ah/": %s' %
             filename)
   if _file_path_negative_2_re.search(filename) is not None:
     return 'Filename cannot have trailing / or contain //: %s' % filename
--- a/thirdparty/google_appengine/google/appengine/api/capabilities/capability_service_pb.py	Mon Sep 07 20:26:39 2009 +0200
+++ b/thirdparty/google_appengine/google/appengine/api/capabilities/capability_service_pb.py	Mon Sep 07 20:27:37 2009 +0200
@@ -159,26 +159,27 @@
       cnt+=1
     return res
 
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
   kpackage = 1
   kcapability = 2
   kcall = 3
 
-  _TEXT = (
-   "ErrorCode",
-   "package",
-   "capability",
-   "call",
-  )
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    1: "package",
+    2: "capability",
+    3: "call",
+  }, 3)
 
-  _TYPES = (
-   ProtocolBuffer.Encoder.NUMERIC,
-   ProtocolBuffer.Encoder.STRING,
-
-   ProtocolBuffer.Encoder.STRING,
-
-   ProtocolBuffer.Encoder.STRING,
-
-  )
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    1: ProtocolBuffer.Encoder.STRING,
+    2: ProtocolBuffer.Encoder.STRING,
+    3: ProtocolBuffer.Encoder.STRING,
+  }, 3, ProtocolBuffer.Encoder.MAX_TYPE)
 
   _STYLE = """"""
   _STYLE_CONTENT_TYPE = """"""
@@ -337,26 +338,27 @@
       cnt+=1
     return res
 
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
   ksummary_status = 1
   ktime_until_scheduled = 2
   kconfig = 3
 
-  _TEXT = (
-   "ErrorCode",
-   "summary_status",
-   "time_until_scheduled",
-   "config",
-  )
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    1: "summary_status",
+    2: "time_until_scheduled",
+    3: "config",
+  }, 3)
 
-  _TYPES = (
-   ProtocolBuffer.Encoder.NUMERIC,
-   ProtocolBuffer.Encoder.NUMERIC,
-
-   ProtocolBuffer.Encoder.NUMERIC,
-
-   ProtocolBuffer.Encoder.STRING,
-
-  )
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    1: ProtocolBuffer.Encoder.NUMERIC,
+    2: ProtocolBuffer.Encoder.NUMERIC,
+    3: ProtocolBuffer.Encoder.STRING,
+  }, 3, ProtocolBuffer.Encoder.MAX_TYPE)
 
   _STYLE = """"""
   _STYLE_CONTENT_TYPE = """"""
--- a/thirdparty/google_appengine/google/appengine/api/datastore.py	Mon Sep 07 20:26:39 2009 +0200
+++ b/thirdparty/google_appengine/google/appengine/api/datastore.py	Mon Sep 07 20:27:37 2009 +0200
@@ -49,12 +49,19 @@
 from google.appengine.runtime import apiproxy_errors
 from google.appengine.datastore import entity_pb
 
+try:
+  from google.appengine.api.labs.taskqueue import taskqueue_service_pb
+except ImportError:
+  from google.appengine.api.taskqueue import taskqueue_service_pb
+
 MAX_ALLOWABLE_QUERIES = 30
 
 DEFAULT_TRANSACTION_RETRIES = 3
 
 _MAX_INDEXED_PROPERTIES = 5000
 
+_MAX_ID_BATCH_SIZE = 1000 * 1000 * 1000
+
 Key = datastore_types.Key
 typename = datastore_types.typename
 
@@ -147,7 +154,7 @@
     return []
 
   for entity in entities:
-    if not entity.kind() or not entity.app():
+    if not entity.kind() or not entity.app_id_namespace():
       raise datastore_errors.BadRequestError(
           'App and kind must not be empty, in entity: %s' % entity)
 
@@ -156,8 +163,6 @@
 
   keys = [e.key() for e in entities]
   tx = _MaybeSetupTransaction(req, keys)
-  if tx:
-    tx.RecordModifiedKeys([k for k in keys if k.has_id_or_name()])
 
   resp = datastore_pb.PutResponse()
   try:
@@ -177,7 +182,6 @@
     entity._Entity__key._Key__reference.CopyFrom(key)
 
   if tx:
-    tx.RecordModifiedKeys([e.key() for e in entities], error_on_repeat=False)
     tx.entity_group = entities[0].entity_group()
 
   if multiple:
@@ -259,8 +263,6 @@
   req.key_list().extend([key._Key__reference for key in keys])
 
   tx = _MaybeSetupTransaction(req, keys)
-  if tx:
-    tx.RecordModifiedKeys(keys)
 
   resp = datastore_pb.DeleteResponse()
   try:
@@ -275,8 +277,8 @@
   Includes read-only accessors for app id, kind, and primary key. Also
   provides dictionary-style access to properties.
   """
-  def __init__(self, kind, parent=None, _app=None, name=None,
-               unindexed_properties=[]):
+  def __init__(self, kind, parent=None, _app=None, name=None, id=None,
+               unindexed_properties=[], _namespace=None):
     """Constructor. Takes the kind and transaction root, which cannot be
     changed after the entity is constructed, and an optional parent. Raises
     BadArgumentError or BadKeyError if kind is invalid or parent is not an
@@ -289,33 +291,41 @@
       parent: Entity or Key
       # if provided, this entity's name.
       name: string
+      # if provided, this entity's id.
+      id: integer
       # if provided, a sequence of property names that should not be indexed
       # by the built-in single property indices.
       unindexed_properties: list or tuple of strings
     """
     ref = entity_pb.Reference()
-    _app = datastore_types.ResolveAppId(_app)
-    ref.set_app(_app)
+    _app_namespace = datastore_types.ResolveAppIdNamespace(_app, _namespace)
+    ref.set_app(_app_namespace.to_encoded())
 
     datastore_types.ValidateString(kind, 'kind',
                                    datastore_errors.BadArgumentError)
-
     if parent is not None:
       parent = _GetCompleteKeyOrError(parent)
-      if _app != parent.app():
+      if _app_namespace != parent.app_id_namespace():
         raise datastore_errors.BadArgumentError(
-            "_app %s doesn't match parent's app %s" % (_app, parent.app()))
+            " %s doesn't match parent's app_namespace %s" %
+            (_app_namespace, parent.app_id_namespace()))
       ref.CopyFrom(parent._Key__reference)
 
     last_path = ref.mutable_path().add_element()
     last_path.set_type(kind.encode('utf-8'))
 
+    if name is not None and id is not None:
+      raise datastore_errors.BadArgumentError(
+          "Cannot set both name and id on an Entity")
+
     if name is not None:
       datastore_types.ValidateString(name, 'name')
-      if name[0] in string.digits:
-        raise datastore_errors.BadValueError('name cannot begin with a digit')
       last_path.set_name(name.encode('utf-8'))
 
+    if id is not None:
+      datastore_types.ValidateInteger(id, 'id')
+      last_path.set_id(id)
+
     unindexed_properties, multiple = NormalizeAndTypeCheck(unindexed_properties, basestring)
     if not multiple:
       raise datastore_errors.BadArgumentError(
@@ -329,15 +339,32 @@
 
   def app(self):
     """Returns the name of the application that created this entity, a
-    string.
+    string or None if not set.
     """
     return self.__key.app()
 
+  def namespace(self):
+    """Returns the namespace of this entity, a string or None.
+    """
+    return self.__key.namespace()
+
+  def app_id_namespace(self):
+    """Returns the AppIdNamespace of this entity or None if not set.
+    """
+    return self.__key.app_id_namespace()
+
   def kind(self):
     """Returns this entity's kind, a string.
     """
     return self.__key.kind()
 
+  def is_saved(self):
+    """Returns if this entity has been saved to the datastore
+    """
+    last_path = self.__key._Key__reference.path().element_list()[-1]
+    return ((last_path.has_name() ^ last_path.has_id()) and
+            self.__key.has_id_or_name())
+
   def key(self):
     """Returns this entity's primary key, a Key instance.
     """
@@ -483,7 +510,15 @@
 
     return xml
 
-  def _ToPb(self):
+  def ToPb(self):
+    """Converts this Entity to its protocol buffer representation.
+
+    Returns:
+      entity_pb.Entity
+    """
+    return self._ToPb(False)
+
+  def _ToPb(self, mark_key_as_saved=True):
     """Converts this Entity to its protocol buffer representation. Not
     intended to be used by application developers.
 
@@ -493,6 +528,9 @@
 
     pb = entity_pb.EntityProto()
     pb.mutable_key().CopyFrom(self.key()._ToPb())
+    last_path = pb.key().path().element_list()[-1]
+    if mark_key_as_saved and last_path.has_name() and last_path.has_id():
+      last_path.clear_id()
 
     group = pb.mutable_entity_group()
     if self.__key.has_id_or_name():
@@ -523,7 +561,25 @@
     return pb
 
   @staticmethod
-  def _FromPb(pb):
+  def FromPb(pb):
+    """Static factory method. Returns the Entity representation of the
+    given protocol buffer (datastore_pb.Entity).
+
+    Args:
+      pb: datastore_pb.Entity or str encoding of a datastore_pb.Entity
+
+    Returns:
+      Entity: the Entity representation of pb
+    """
+    if isinstance(pb, str):
+      real_pb = entity_pb.EntityProto()
+      real_pb.ParseFromString(pb)
+      pb = real_pb
+
+    return Entity._FromPb(pb, require_valid_key=False)
+
+  @staticmethod
+  def _FromPb(pb, require_valid_key=True):
     """Static factory method. Returns the Entity representation of the
     given protocol buffer (datastore_pb.Entity). Not intended to be used by
     application developers.
@@ -542,12 +598,13 @@
     assert pb.key().path().element_size() > 0
 
     last_path = pb.key().path().element_list()[-1]
-    assert last_path.has_id() ^ last_path.has_name()
-    if last_path.has_id():
-      assert last_path.id() != 0
-    else:
-      assert last_path.has_name()
-      assert last_path.name()
+    if require_valid_key:
+      assert last_path.has_id() ^ last_path.has_name()
+      if last_path.has_id():
+        assert last_path.id() != 0
+      else:
+        assert last_path.has_name()
+        assert last_path.name()
 
     unindexed_properties = [p.name() for p in pb.raw_property_list()]
 
@@ -701,7 +758,8 @@
   __inequality_prop = None
   __inequality_count = 0
 
-  def __init__(self, kind, filters={}, _app=None, keys_only=False):
+  def __init__(self, kind=None, filters={}, _app=None, keys_only=False,
+               _namespace=None):
     """Constructor.
 
     Raises BadArgumentError if kind is not a string. Raises BadValueError or
@@ -714,15 +772,17 @@
       filters: dict
       keys_only: boolean
     """
-    datastore_types.ValidateString(kind, 'kind',
-                                   datastore_errors.BadArgumentError)
+    if kind is not None:
+      datastore_types.ValidateString(kind, 'kind',
+                                     datastore_errors.BadArgumentError)
 
     self.__kind = kind
     self.__orderings = []
     self.__filter_order = {}
     self.update(filters)
 
-    self.__app = datastore_types.ResolveAppId(_app)
+    self.__app = datastore_types.ResolveAppIdNamespace(_app,
+                                                       _namespace).to_encoded()
     self.__keys_only = keys_only
 
   def Order(self, *orderings):
@@ -794,6 +854,13 @@
             str(direction))
         direction = Query.ASCENDING
 
+      if (self.__kind is None and
+          (property != datastore_types._KEY_SPECIAL_PROPERTY or
+          direction != Query.ASCENDING)):
+        raise datastore_errors.BadArgumentError(
+            'Only %s ascending orders are supported on kindless queries' %
+            datastore_types._KEY_SPECIAL_PROPERTY)
+
       orderings[i] = (property, direction)
 
     if (orderings and self.__inequality_prop and
@@ -884,16 +951,17 @@
     """
     return self._Run()
 
-  def _Run(self, limit=None, offset=None):
+  def _Run(self, limit=None, offset=None,
+           prefetch_count=None, next_count=None):
     """Runs this query, with an optional result limit and an optional offset.
 
-    Identical to Run, with the extra optional limit and offset parameters.
-    limit and offset must both be integers >= 0.
+    Identical to Run, with the extra optional limit, offset, prefetch_count,
+    next_count parameters. These parameters must be integers >= 0.
 
     This is not intended to be used by application developers. Use Get()
     instead!
     """
-    pb = self._ToPb(limit, offset)
+    pb = self._ToPb(limit, offset, prefetch_count)
     result = datastore_pb.QueryResult()
 
     try:
@@ -907,7 +975,7 @@
         raise datastore_errors.NeedIndexError(
           str(exc) + '\nThis query needs this index:\n' + yaml)
 
-    return Iterator(result)
+    return Iterator(result, batch_size=next_count)
 
   def Get(self, limit, offset=0):
     """Fetches and returns a maximum number of results from the query.
@@ -956,7 +1024,8 @@
           'Argument to Get named \'offset\' must be an int greater than or '
           'equal to 0; received %s (a %s)' % (offset, typename(offset)))
 
-    return self._Run(limit, offset)._Get(limit)
+    return self._Run(limit=limit, offset=offset,
+                     prefetch_count=limit)._Get(limit)
 
   def Count(self, limit=None):
     """Returns the number of entities that this query matches. The returned
@@ -1108,6 +1177,12 @@
           'first sort order, if any sort orders are supplied' %
           ', '.join(self.INEQUALITY_OPERATORS))
 
+    if (self.__kind is None and
+        property != datastore_types._KEY_SPECIAL_PROPERTY):
+      raise datastore_errors.BadFilterError(
+          'Only %s filters are allowed on kindless queries.' %
+          datastore_types._KEY_SPECIAL_PROPERTY)
+
     if property in datastore_types._SPECIAL_PROPERTIES:
       if property == datastore_types._KEY_SPECIAL_PROPERTY:
         for value in values:
@@ -1118,7 +1193,7 @@
 
     return match
 
-  def _ToPb(self, limit=None, offset=None):
+  def _ToPb(self, limit=None, offset=None, count=None):
     """Converts this Query to its protocol buffer representation. Not
     intended to be used by application developers. Enforced by hiding the
     datastore_pb classes.
@@ -1129,6 +1204,8 @@
       # number of results that match the query to skip.  limit is applied
       # after the offset is fulfilled
       offset: int
+      # the requested initial batch size
+      count: int
 
     Returns:
       # the PB representation of this Query
@@ -1138,6 +1215,7 @@
       BadRequestError if called inside a transaction and the query does not
       include an ancestor.
     """
+
     if not self.__ancestor and _CurrentTransactionKey():
       raise datastore_errors.BadRequestError(
         'Only ancestor queries are allowed inside transactions.')
@@ -1145,7 +1223,8 @@
     pb = datastore_pb.Query()
     _MaybeSetupTransaction(pb, [self.__ancestor])
 
-    pb.set_kind(self.__kind.encode('utf-8'))
+    if self.__kind is not None:
+      pb.set_kind(self.__kind.encode('utf-8'))
     pb.set_keys_only(bool(self.__keys_only))
     if self.__app:
       pb.set_app(self.__app.encode('utf-8'))
@@ -1153,6 +1232,8 @@
       pb.set_limit(limit)
     if offset is not None:
       pb.set_offset(offset)
+    if count is not None:
+      pb.set_count(count)
     if self.__ancestor:
       pb.mutable_ancestor().CopyFrom(self.__ancestor._Key__reference)
 
@@ -1193,6 +1274,44 @@
     return pb
 
 
+def AllocateIds(model_key, size):
+  """Allocates a range of IDs of size for the key defined by model_key
+
+  Allocates a range of IDs in the datastore such that those IDs will not
+  be automatically assigned to new entities. You can only allocate IDs
+  for model keys from your app. If there is an error, raises a subclass of
+  datastore_errors.Error.
+
+  Args:
+    model_key: Key or string to serve as a model specifying the ID sequence
+               in which to allocate IDs
+
+  Returns:
+    (start, end) of the allocated range, inclusive.
+  """
+  keys, multiple = NormalizeAndTypeCheckKeys(model_key)
+
+  if len(keys) > 1:
+    raise datastore_errors.BadArgumentError(
+        'Cannot allocate IDs for more than one model key at a time')
+
+  if size > _MAX_ID_BATCH_SIZE:
+    raise datastore_errors.BadArgumentError(
+        'Cannot allocate more than %s ids at a time' % _MAX_ID_BATCH_SIZE)
+
+  req = datastore_pb.AllocateIdsRequest()
+  req.mutable_model_key().CopyFrom(keys[0]._Key__reference)
+  req.set_size(size)
+
+  resp = datastore_pb.AllocateIdsResponse()
+  try:
+    apiproxy_stub_map.MakeSyncCall('datastore_v3', 'AllocateIds', req, resp)
+  except apiproxy_errors.ApplicationError, err:
+    raise _ToDatastoreError(err)
+
+  return resp.start(), resp.end()
+
+
 class MultiQuery(Query):
   """Class representing a query which requires multiple datastore queries.
 
@@ -1517,9 +1636,10 @@
   > for person in it:
   >   print 'Hi, %s!' % person['name']
   """
-  def __init__(self, query_result_pb):
+  def __init__(self, query_result_pb, batch_size=None):
     self.__cursor = query_result_pb.cursor()
     self.__keys_only = query_result_pb.keys_only()
+    self.__batch_size = batch_size
     self.__buffer = self._ProcessQueryResult(query_result_pb)
 
   def _Get(self, count):
@@ -1547,16 +1667,16 @@
       # a list of entities or keys
       [Entity or Key, ...]
     """
-    entityList = self._Next(count)
-    while len(entityList) < count and self.__more_results:
-      next_results = self._Next(count - len(entityList))
+    entity_list = self._Next(count)
+    while len(entity_list) < count and self.__more_results:
+      next_results = self._Next(count - len(entity_list), self.__batch_size)
       if not next_results:
         break
-      entityList += next_results
-    return entityList;
-
-  def _Next(self, count):
-    """Returns the next result(s) of the query.
+      entity_list += next_results
+    return entity_list;
+
+  def _Next(self, count=None):
+    """Returns the next batch of results.
 
     Not intended to be used by application developers. Use the python
     iterator protocol instead.
@@ -1565,11 +1685,14 @@
     results. If the query specified a sort order, results are returned in that
     order. Otherwise, the order is undefined.
 
-    The argument, count, specifies the number of results to return. However, the
-    length of the returned list may be smaller than count. This is the case if
-    count is greater than the number of remaining results or the size of the
-    remaining results exciteds the RPC buffer limit. Use _Get to insure all
-    possible entities are retrieved.
+    The optional argument, count, specifies the number of results to return.
+    However, the length of the returned list may be smaller than count. This is
+    the case if count is greater than the number of remaining results or the
+    size of the remaining results exceeds the RPC buffer limit. Use _Get to
+    insure all possible entities are retrieved.
+
+    If the count is omitted, the datastore backend decides how many entities to
+    send.
 
     There is an internal buffer for use with the next() method. If this buffer
     is not empty, up to 'count' values are removed from this buffer and
@@ -1580,19 +1703,23 @@
 
     Args:
       # the number of results to return; must be >= 1
-      count: int or long
+      count: int or long or None
 
     Returns:
       # a list of entities or keys
       [Entity or Key, ...]
     """
-    if not isinstance(count, (int, long)) or count <= 0:
+    if count is not None and (not isinstance(count, (int, long)) or count <= 0):
       raise datastore_errors.BadArgumentError(
         'Argument to _Next must be an int greater than 0; received %s (a %s)' %
         (count, typename(count)))
 
     if self.__buffer:
-      if count <= len(self.__buffer):
+      if count is None:
+        entity_list = self.__buffer
+        self.__buffer = []
+        return entity_list
+      elif count <= len(self.__buffer):
         entity_list = self.__buffer[:count]
         del self.__buffer[:count]
         return entity_list
@@ -1601,13 +1728,15 @@
         self.__buffer = []
         count -= len(entity_list)
     else:
-        entity_list=[]
+        entity_list = []
+
 
     if not self.__more_results:
       return entity_list
 
     req = datastore_pb.NextRequest()
-    req.set_count(count)
+    if count is not None:
+      req.set_count(count)
     req.mutable_cursor().CopyFrom(self.__cursor)
     result = datastore_pb.QueryResult()
     try:
@@ -1642,11 +1771,9 @@
     else:
       return [Entity._FromPb(e) for e in result.result_list()]
 
-  _BUFFER_SIZE = 20
-
   def next(self):
     if not self.__buffer:
-      self.__buffer = self._Next(self._BUFFER_SIZE)
+      self.__buffer = self._Next(self.__batch_size)
     try:
       return self.__buffer.pop(0)
     except IndexError:
@@ -1657,44 +1784,28 @@
 class _Transaction(object):
   """Encapsulates a transaction currently in progress.
 
-  If we've sent a BeginTransaction call, then handle will be a
-  datastore_pb.Transaction that holds the transaction handle.
-
   If we know the entity group for this transaction, it's stored in the
-  entity_group attribute, which is set by RecordModifiedKeys().
+  entity_group attribute, which is set by RunInTransaction().
 
   modified_keys is a set containing the Keys of all entities modified (ie put
   or deleted) in this transaction. If an entity is modified more than once, a
   BadRequestError is raised.
   """
-  def __init__(self):
-    """Initializes modified_keys to the empty set."""
-    self.handle = None
+  def __init__(self, handle):
+    """Initializes the transaction.
+
+    Args:
+      handle: a datastore_pb.Transaction returned by a BeginTransaction call
+    """
+    assert isinstance(handle, datastore_pb.Transaction)
+    explanation = []
+    assert handle.IsInitialized(explanation), explanation
+
+    self.handle = handle
     self.entity_group = None
     self.modified_keys = None
     self.modified_keys = set()
 
-  def RecordModifiedKeys(self, keys, error_on_repeat=True):
-    """Updates the modified keys seen so far.
-
-    If error_on_repeat is True and any of the given keys have already been
-    modified, raises BadRequestError.
-
-    Args:
-      keys: sequence of Keys
-    """
-    keys, _ = NormalizeAndTypeCheckKeys(keys)
-    keys = set(keys)
-
-    if error_on_repeat:
-      already_modified = self.modified_keys.intersection(keys)
-      if already_modified:
-        raise datastore_errors.BadRequestError(
-          "Can't update entity more than once in a transaction: %r" %
-          already_modified.pop())
-
-    self.modified_keys.update(keys)
-
 
 def RunInTransaction(function, *args, **kwargs):
   """Runs a function inside a datastore transaction.
@@ -1799,26 +1910,31 @@
 
   try:
     tx_key = _NewTransactionKey()
-    tx = _Transaction()
-    _txes[tx_key] = tx
 
     for i in range(0, retries + 1):
-      tx.modified_keys.clear()
+      handle = datastore_pb.Transaction()
+      try:
+        apiproxy_stub_map.MakeSyncCall('datastore_v3', 'BeginTransaction',
+                                       api_base_pb.VoidProto(), handle)
+      except apiproxy_errors.ApplicationError, err:
+        raise _ToDatastoreError(err)
+
+      tx = _Transaction(handle)
+      _txes[tx_key] = tx
 
       try:
         result = function(*args, **kwargs)
       except:
         original_exception = sys.exc_info()
 
-        if tx.handle:
-          try:
-            resp = api_base_pb.VoidProto()
-            apiproxy_stub_map.MakeSyncCall('datastore_v3', 'Rollback',
-                                           tx.handle, resp)
-          except:
-            exc_info = sys.exc_info()
-            logging.info('Exception sending Rollback:\n' +
-                         ''.join(traceback.format_exception(*exc_info)))
+        try:
+          resp = api_base_pb.VoidProto()
+          apiproxy_stub_map.MakeSyncCall('datastore_v3', 'Rollback',
+                                         tx.handle, resp)
+        except:
+          exc_info = sys.exc_info()
+          logging.info('Exception sending Rollback:\n' +
+                       ''.join(traceback.format_exception(*exc_info)))
 
         type, value, trace = original_exception
         if type is datastore_errors.Rollback:
@@ -1826,21 +1942,20 @@
         else:
           raise type, value, trace
 
-      if tx.handle:
-        try:
-          resp = datastore_pb.CommitResponse()
-          apiproxy_stub_map.MakeSyncCall('datastore_v3', 'Commit',
-                                         tx.handle, resp)
-        except apiproxy_errors.ApplicationError, err:
-          if (err.application_error ==
-              datastore_pb.Error.CONCURRENT_TRANSACTION):
-            logging.warning('Transaction collision for entity group with '
-                            'key %r. Retrying...', tx.entity_group)
-            tx.handle = None
-            tx.entity_group = None
-            continue
-          else:
-            raise _ToDatastoreError(err)
+      try:
+        resp = datastore_pb.CommitResponse()
+        apiproxy_stub_map.MakeSyncCall('datastore_v3', 'Commit',
+                                       tx.handle, resp)
+      except apiproxy_errors.ApplicationError, err:
+        if (err.application_error ==
+            datastore_pb.Error.CONCURRENT_TRANSACTION):
+          logging.warning('Transaction collision for entity group with '
+                          'key %r. Retrying...', tx.entity_group)
+          tx.handle = None
+          tx.entity_group = None
+          continue
+        else:
+          raise _ToDatastoreError(err)
 
       return result
 
@@ -1854,12 +1969,11 @@
 
 
 def _MaybeSetupTransaction(request, keys):
-  """Begins a transaction, if necessary, and populates it in the request.
+  """If we're in a transaction, validates and populates it in the request.
 
   If we're currently inside a transaction, this records the entity group,
-  checks that the keys are all in that entity group, creates the transaction
-  PB, and sends the BeginTransaction. It then populates the transaction handle
-  in the request.
+  checks that the keys are all in that entity group, and populates the
+  transaction handle in the request.
 
   Raises BadRequestError if the entity has a different entity group than the
   current transaction.
@@ -1872,7 +1986,9 @@
     _Transaction if we're inside a transaction, otherwise None
   """
   assert isinstance(request, (datastore_pb.GetRequest, datastore_pb.PutRequest,
-                              datastore_pb.DeleteRequest, datastore_pb.Query))
+                              datastore_pb.DeleteRequest, datastore_pb.Query,
+                              taskqueue_service_pb.TaskQueueAddRequest,
+                              )), request.__class__
   tx_key = None
 
   try:
@@ -1883,8 +1999,10 @@
       groups = [k.entity_group() for k in keys]
       if tx.entity_group:
         expected_group = tx.entity_group
+      elif groups:
+        expected_group = groups[0]
       else:
-        expected_group = groups[0]
+        expected_group = None
 
       for group in groups:
         if (group != expected_group or
@@ -1901,12 +2019,7 @@
         if not tx.entity_group and group.has_id_or_name():
           tx.entity_group = group
 
-      if not tx.handle:
-        tx.handle = datastore_pb.Transaction()
-        req = api_base_pb.VoidProto()
-        apiproxy_stub_map.MakeSyncCall('datastore_v3', 'BeginTransaction', req,
-                                       tx.handle)
-
+      assert tx.handle.IsInitialized()
       request.mutable_transaction().CopyFrom(tx.handle)
 
       return tx
--- a/thirdparty/google_appengine/google/appengine/api/datastore_admin.py	Mon Sep 07 20:26:39 2009 +0200
+++ b/thirdparty/google_appengine/google/appengine/api/datastore_admin.py	Mon Sep 07 20:27:37 2009 +0200
@@ -39,7 +39,7 @@
     }
 
 
-def GetSchema(_app=None):
+def GetSchema(_app=None, properties=True, start_kind=None, end_kind=None):
   """Infers an app's schema from the entities in the datastore.
 
   Note that the PropertyValue PBs in the returned EntityProtos are empty
@@ -48,11 +48,21 @@
   throw UserNotFoundError because their email and auth domain fields will be
   empty.
 
+  Args:
+    properties: boolean, whether to include property names and types
+    start_kind, end_kind: optional range endpoints for the kinds to return,
+      compared lexicographically
+
   Returns:
     list of entity_pb.EntityProto, with kind and property names and types
   """
-  req = api_base_pb.StringProto()
-  req.set_value(datastore_types.ResolveAppId(_app))
+  req = datastore_pb.GetSchemaRequest()
+  req.set_app(datastore_types.ResolveAppId(_app))
+  req.set_properties(properties)
+  if start_kind is not None:
+    req.set_start_kind(start_kind)
+  if end_kind is not None:
+    req.set_end_kind(end_kind)
   resp = datastore_pb.Schema()
 
   _Call('GetSchema', req, resp)
--- a/thirdparty/google_appengine/google/appengine/api/datastore_file_stub.py	Mon Sep 07 20:26:39 2009 +0200
+++ b/thirdparty/google_appengine/google/appengine/api/datastore_file_stub.py	Mon Sep 07 20:27:37 2009 +0200
@@ -75,6 +75,7 @@
 
 _MAX_QUERY_COMPONENTS = 100
 
+_BATCH_SIZE = 20
 
 class _StoredEntity(object):
   """Simple wrapper around an entity stored by the stub.
@@ -105,7 +106,14 @@
     cursor: the integer cursor
     count: the original total number of results
     keys_only: whether the query is keys_only
+
+  Class attributes:
+    _next_cursor: the next cursor to allocate
+    _next_cursor_lock: protects _next_cursor
   """
+  _next_cursor = 1
+  _next_cursor_lock = threading.Lock()
+
   def __init__(self, results, keys_only):
     """Constructor.
 
@@ -117,7 +125,13 @@
     self.__results = results
     self.count = len(results)
     self.keys_only = keys_only
-    self.cursor = id(self)
+
+    self._next_cursor_lock.acquire()
+    try:
+      self.cursor = _Cursor._next_cursor
+      _Cursor._next_cursor += 1
+    finally:
+      self._next_cursor_lock.release()
 
   def PopulateQueryResult(self, result, count):
     """Populates a QueryResult with this cursor and the given number of results.
@@ -272,8 +286,25 @@
       raise datastore_errors.BadRequestError(
           'app %s cannot access app %s\'s data' % (self.__app_id, app_id))
 
+  def __ValidateKey(self, key):
+    """Validate this key.
 
-  def _AppKindForKey(self, key):
+    Args:
+      key: entity_pb.Reference
+
+    Raises:
+      datastore_errors.BadRequestError: if the key is invalid
+    """
+    assert isinstance(key, entity_pb.Reference)
+
+    self.__ValidateAppId(key.app())
+
+    for elem in key.path().element_list():
+      if elem.has_id() == elem.has_name():
+        raise datastore_errors.BadRequestError(
+          'each key path element should have id or name but not both: %r' % key)
+
+  def _AppIdNamespaceKindForKey(self, key):
     """ Get (app, kind) tuple from given key.
 
     The (app, kind) tuple is used as an index into several internal
@@ -295,7 +326,7 @@
       entity: entity_pb.EntityProto
     """
     key = entity.key()
-    app_kind = self._AppKindForKey(key)
+    app_kind = self._AppIdNamespaceKindForKey(key)
     if app_kind not in self.__entities:
       self.__entities[app_kind] = {}
     self.__entities[app_kind][key] = _StoredEntity(entity)
@@ -440,16 +471,20 @@
       self.__file_lock.release()
 
   def MakeSyncCall(self, service, call, request, response):
-    """ The main RPC entry point. service must be 'datastore_v3'. So far, the
-    supported calls are 'Get', 'Put', 'RunQuery', 'Next', and 'Count'.
+    """ The main RPC entry point. service must be 'datastore_v3'.
     """
+    self.assertPbIsInitialized(request)
     super(DatastoreFileStub, self).MakeSyncCall(service,
                                                 call,
                                                 request,
                                                 response)
+    self.assertPbIsInitialized(response)
 
+  def assertPbIsInitialized(self, pb):
+    """Raises an exception if the given PB is not initialized and valid."""
     explanation = []
-    assert response.IsInitialized(explanation), explanation
+    assert pb.IsInitialized(explanation), explanation
+    pb.Encode()
 
   def QueryHistory(self):
     """Returns a dict that maps Query PBs to times they've been run.
@@ -460,7 +495,7 @@
   def _Dynamic_Put(self, put_request, put_response):
     clones = []
     for entity in put_request.entity_list():
-      self.__ValidateAppId(entity.key().app())
+      self.__ValidateKey(entity.key())
 
       clone = entity_pb.EntityProto()
       clone.CopyFrom(entity)
@@ -515,7 +550,7 @@
 
     for key in get_request.key_list():
       self.__ValidateAppId(key.app())
-      app_kind = self._AppKindForKey(key)
+      app_kind = self._AppIdNamespaceKindForKey(key)
 
       group = get_response.add_entity()
       try:
@@ -532,7 +567,7 @@
     try:
       for key in delete_request.key_list():
         self.__ValidateAppId(key.app())
-        app_kind = self._AppKindForKey(key)
+        app_kind = self._AppIdNamespaceKindForKey(key)
         try:
           del self.__entities[app_kind][key]
           if not self.__entities[app_kind]:
@@ -559,8 +594,9 @@
       entities = self.__entities
       self.__tx_lock.release()
 
-    app = query.app()
-    self.__ValidateAppId(app)
+    app_id_namespace = datastore_types.parse_app_id_namespace(query.app())
+    app_id = app_id_namespace.app_id()
+    self.__ValidateAppId(app_id)
 
     if query.has_offset() and query.offset() > _MAX_QUERY_OFFSET:
       raise apiproxy_errors.ApplicationError(
@@ -575,11 +611,14 @@
           ('query is too large. may not have more than %s filters'
            ' + sort orders ancestor total' % _MAX_QUERY_COMPONENTS))
 
+    (filters, orders) = datastore_index.Normalize(query.filter_list(),
+                                                  query.order_list())
+
     if self.__require_indexes:
       required, kind, ancestor, props, num_eq_filters = datastore_index.CompositeIndexForQuery(query)
       if required:
         required_key = kind, ancestor, props
-        indexes = self.__indexes.get(app)
+        indexes = self.__indexes.get(app_id)
         if not indexes:
           raise apiproxy_errors.ApplicationError(
               datastore_pb.Error.NEED_INDEX,
@@ -606,9 +645,15 @@
               "You must update the index.yaml file in your application root.")
 
     try:
-      query.set_app(app)
-      results = entities[app, query.kind()].values()
-      results = [entity.native for entity in results]
+      query.set_app(app_id_namespace.to_encoded())
+      if query.has_kind():
+        results = entities[app_id_namespace.to_encoded(), query.kind()].values()
+        results = [entity.native for entity in results]
+      else:
+        results = []
+        for key in entities:
+          if key[0] == app_id_namespace.to_encoded():
+            results += [entity.native for entity in entities[key].values()]
     except KeyError:
       results = []
 
@@ -642,7 +687,7 @@
           return True
       return False
 
-    for filt in query.filter_list():
+    for filt in filters:
       assert filt.op() != datastore_pb.Query_Filter.IN
 
       prop = filt.property(0).name().decode('utf-8')
@@ -694,7 +739,7 @@
 
       results = filter(passes_filter, results)
 
-    for order in query.order_list():
+    for order in orders:
       prop = order.property().decode('utf-8')
       results = [entity for entity in results if has_prop_indexed(entity, prop)]
 
@@ -703,7 +748,7 @@
       entity a is considered smaller than, equal to, or larger than b,
       according to the query's orderings. """
       cmped = 0
-      for o in query.order_list():
+      for o in orders:
         prop = o.property().decode('utf-8')
 
         reverse = (o.direction() is datastore_pb.Query_Order.DESCENDING)
@@ -773,7 +818,15 @@
 
     cursor = _Cursor(results, query.keys_only())
     self.__queries[cursor.cursor] = cursor
-    cursor.PopulateQueryResult(query_result, 0)
+
+    if query.has_count():
+      count = query.count()
+    elif query.has_limit():
+      count = query.limit()
+    else:
+      count = _BATCH_SIZE
+
+    cursor.PopulateQueryResult(query_result, count)
 
   def _Dynamic_Next(self, next_request, query_result):
     cursor_handle = next_request.cursor().cursor()
@@ -784,7 +837,10 @@
       raise apiproxy_errors.ApplicationError(
           datastore_pb.Error.BAD_REQUEST, 'Cursor %d not found' % cursor_handle)
 
-    cursor.PopulateQueryResult(query_result, next_request.count())
+    count = _BATCH_SIZE
+    if next_request.has_count():
+      count = next_request.count()
+    cursor.PopulateQueryResult(query_result, count)
 
   def _Dynamic_Count(self, query, integer64proto):
     self.__ValidateAppId(query.app())
@@ -830,70 +886,94 @@
     self.__tx_snapshot = {}
     self.__tx_lock.release()
 
-  def _Dynamic_GetSchema(self, app_str, schema):
-    minint = -sys.maxint - 1
-    try:
-      minfloat = float('-inf')
-    except ValueError:
-      minfloat = -1e300000
-
-    app_str = app_str.value()
+  def _Dynamic_GetSchema(self, req, schema):
+    app_str = req.app()
     self.__ValidateAppId(app_str)
 
     kinds = []
 
     for app, kind in self.__entities:
-      if app == app_str:
-        app_kind = (app, kind)
-        if app_kind in self.__schema_cache:
-          kinds.append(self.__schema_cache[app_kind])
-          continue
+      if (app != app_str or
+          (req.has_start_kind() and kind < req.start_kind()) or
+          (req.has_end_kind() and kind > req.end_kind())):
+        continue
+
+      app_kind = (app, kind)
+      if app_kind in self.__schema_cache:
+        kinds.append(self.__schema_cache[app_kind])
+        continue
 
-        kind_pb = entity_pb.EntityProto()
-        kind_pb.mutable_key().set_app('')
-        kind_pb.mutable_key().mutable_path().add_element().set_type(kind)
-        kind_pb.mutable_entity_group()
+      kind_pb = entity_pb.EntityProto()
+      kind_pb.mutable_key().set_app('')
+      kind_pb.mutable_key().mutable_path().add_element().set_type(kind)
+      kind_pb.mutable_entity_group()
 
-        props = {}
+      props = {}
 
-        for entity in self.__entities[app_kind].values():
-          for prop in entity.protobuf.property_list():
-            if prop.name() not in props:
-              props[prop.name()] = entity_pb.PropertyValue()
-            props[prop.name()].MergeFrom(prop.value())
+      for entity in self.__entities[app_kind].values():
+        for prop in entity.protobuf.property_list():
+          if prop.name() not in props:
+            props[prop.name()] = entity_pb.PropertyValue()
+          props[prop.name()].MergeFrom(prop.value())
 
-        for value_pb in props.values():
-          if value_pb.has_int64value():
-            value_pb.set_int64value(minint)
-          if value_pb.has_booleanvalue():
-            value_pb.set_booleanvalue(False)
-          if value_pb.has_stringvalue():
-            value_pb.set_stringvalue('')
-          if value_pb.has_doublevalue():
-            value_pb.set_doublevalue(minfloat)
-          if value_pb.has_pointvalue():
-            value_pb.mutable_pointvalue().set_x(minfloat)
-            value_pb.mutable_pointvalue().set_y(minfloat)
-          if value_pb.has_uservalue():
-            value_pb.mutable_uservalue().set_gaiaid(minint)
-            value_pb.mutable_uservalue().set_email('')
-            value_pb.mutable_uservalue().set_auth_domain('')
-            value_pb.mutable_uservalue().clear_nickname()
-          elif value_pb.has_referencevalue():
-            value_pb.clear_referencevalue()
-            value_pb.mutable_referencevalue().set_app('')
+      for value_pb in props.values():
+        if value_pb.has_int64value():
+          value_pb.set_int64value(0)
+        if value_pb.has_booleanvalue():
+          value_pb.set_booleanvalue(False)
+        if value_pb.has_stringvalue():
+          value_pb.set_stringvalue('none')
+        if value_pb.has_doublevalue():
+          value_pb.set_doublevalue(0.0)
+        if value_pb.has_pointvalue():
+          value_pb.mutable_pointvalue().set_x(0.0)
+          value_pb.mutable_pointvalue().set_y(0.0)
+        if value_pb.has_uservalue():
+          value_pb.mutable_uservalue().set_gaiaid(0)
+          value_pb.mutable_uservalue().set_email('none')
+          value_pb.mutable_uservalue().set_auth_domain('none')
+          value_pb.mutable_uservalue().clear_nickname()
+          value_pb.mutable_uservalue().clear_obfuscated_gaiaid()
+        if value_pb.has_referencevalue():
+          value_pb.clear_referencevalue()
+          value_pb.mutable_referencevalue().set_app('none')
+          pathelem = value_pb.mutable_referencevalue().add_pathelement()
+          pathelem.set_type('none')
+          pathelem.set_name('none')
 
-        for name, value_pb in props.items():
-          prop_pb = kind_pb.add_property()
-          prop_pb.set_name(name)
-          prop_pb.set_multiple(False)
-          prop_pb.mutable_value().CopyFrom(value_pb)
+      for name, value_pb in props.items():
+        prop_pb = kind_pb.add_property()
+        prop_pb.set_name(name)
+        prop_pb.set_multiple(False)
+        prop_pb.mutable_value().CopyFrom(value_pb)
 
-        kinds.append(kind_pb)
-        self.__schema_cache[app_kind] = kind_pb
+      kinds.append(kind_pb)
+      self.__schema_cache[app_kind] = kind_pb
 
     for kind_pb in kinds:
-      schema.add_kind().CopyFrom(kind_pb)
+      kind = schema.add_kind()
+      kind.CopyFrom(kind_pb)
+      if not req.properties():
+        kind.clear_property()
+
+    schema.set_more_results(False)
+
+  def _Dynamic_AllocateIds(self, allocate_ids_request, allocate_ids_response):
+    model_key = allocate_ids_request.model_key()
+    size = allocate_ids_request.size()
+
+    self.__ValidateAppId(model_key.app())
+
+    try:
+      self.__id_lock.acquire()
+      start = self.__next_id
+      self.__next_id += size
+      end = self.__next_id - 1
+    finally:
+     self.__id_lock.release()
+
+    allocate_ids_response.set_start(start)
+    allocate_ids_response.set_end(end)
 
   def _Dynamic_CreateIndex(self, index, id_response):
     self.__ValidateAppId(index.app_id())
--- a/thirdparty/google_appengine/google/appengine/api/datastore_types.py	Mon Sep 07 20:26:39 2009 +0200
+++ b/thirdparty/google_appengine/google/appengine/api/datastore_types.py	Mon Sep 07 20:27:37 2009 +0200
@@ -47,6 +47,7 @@
 from google.appengine.datastore import datastore_pb
 from google.appengine.api import datastore_errors
 from google.appengine.api import users
+from google.appengine.api import namespace_manager
 from google.net.proto import ProtocolBuffer
 from google.appengine.datastore import entity_pb
 
@@ -59,6 +60,8 @@
 _KEY_SPECIAL_PROPERTY = '__key__'
 _SPECIAL_PROPERTIES = frozenset([_KEY_SPECIAL_PROPERTY])
 
+_NAMESPACE_SEPARATOR='!'
+
 class UtcTzinfo(datetime.tzinfo):
   def utcoffset(self, dt): return datetime.timedelta(0)
   def dst(self, dt): return datetime.timedelta(0)
@@ -80,7 +83,8 @@
 def ValidateString(value,
                    name='unused',
                    exception=datastore_errors.BadValueError,
-                   max_len=_MAX_STRING_LENGTH):
+                   max_len=_MAX_STRING_LENGTH,
+                   empty_ok=False):
   """Raises an exception if value is not a valid string or a subclass thereof.
 
   A string is valid if it's not empty, no more than _MAX_STRING_LENGTH bytes,
@@ -91,17 +95,49 @@
     value: the value to validate.
     name: the name of this value; used in the exception message.
     exception: the type of exception to raise.
-    max_len: the maximum allowed length, in bytes
+    max_len: the maximum allowed length, in bytes.
+    empty_ok: allow empty value.
   """
+  if value is None and empty_ok:
+    return
   if not isinstance(value, basestring) or isinstance(value, Blob):
     raise exception('%s should be a string; received %s (a %s):' %
                     (name, value, typename(value)))
-  if not value:
+  if not value and not empty_ok:
     raise exception('%s must not be empty.' % name)
 
   if len(value.encode('utf-8')) > max_len:
     raise exception('%s must be under %d bytes.' % (name, max_len))
 
+def ValidateInteger(value,
+                   name='unused',
+                   exception=datastore_errors.BadValueError,
+                   empty_ok=False,
+                   zero_ok=False,
+                   negative_ok=False):
+  """Raises an exception if value is not a valid integer.
+
+  An integer is valid if it's not negative or empty and is an integer.
+  The exception type can be specified with the exception argument;
+  it defaults to BadValueError.
+
+  Args:
+    value: the value to validate.
+    name: the name of this value; used in the exception message.
+    exception: the type of exception to raise.
+    empty_ok: allow None value.
+    zero_ok: allow zero value.
+    negative_ok: allow negative value.
+  """
+  if value is None and empty_ok:
+    return
+  if not isinstance(value, int):
+    raise exception('%s should be an integer; received %s (a %s).' %
+                    (name, value, typename(value)))
+  if not value and not zero_ok:
+    raise exception('%s must not be 0 (zero)' % name)
+  if value < 0 and not negative_ok:
+    raise exception('%s must not be negative.' % name)
 
 def ResolveAppId(app, name='_app'):
   """Validate app id, providing a default.
@@ -124,6 +160,152 @@
   return app
 
 
+class AppIdNamespace(object):
+  """Combined AppId and Namespace
+
+  An identifier that combines the application identifier and the
+  namespace.
+  """
+  __app_id = None
+  __namespace = None
+
+  def __init__(self, app_id, namespace):
+    """Constructor. Creates a AppIdNamespace from two strings.
+
+    Args:
+      app_id: application identifier string
+      namespace: namespace identifier string
+    Raises:
+      BadArgumentError if the values contain
+      the _NAMESPACE_SEPARATOR character (!) or
+      the app_id is empty.
+    """
+    self.__app_id = app_id
+    if namespace:
+      self.__namespace = namespace
+    else:
+      self.__namespace = None
+    ValidateString(self.__app_id, 'app_id', datastore_errors.BadArgumentError)
+    ValidateString(self.__namespace,
+                   'namespace', datastore_errors.BadArgumentError,
+                   empty_ok=True)
+    if _NAMESPACE_SEPARATOR in self.__app_id:
+      raise datastore_errors.BadArgumentError(
+        'app_id must not contain a "%s"' % _NAMESPACE_SEPARATOR)
+    if self.__namespace and _NAMESPACE_SEPARATOR in self.__namespace:
+      raise datastore_errors.BadArgumentError(
+        'namespace must not contain a "%s"' % _NAMESPACE_SEPARATOR)
+
+  def __cmp__(self, other):
+    """Returns negative, zero, or positive when comparing two AppIdNamespace.
+
+    Args:
+      other: AppIdNamespace to compare to.
+
+    Returns:
+      Negative if self is less than "other"
+      Zero if "other" is equal to self
+      Positive if self is greater than "other"
+    """
+    if not isinstance(other, AppIdNamespace):
+      return cmp(id(self), id(other))
+    return cmp((self.__app_id, self.__namespace),
+               (other.__app_id, other.__namespace))
+
+  def to_encoded(self):
+    """Returns this AppIdNamespace's string equivalent
+
+    i.e. "app!namespace"
+    """
+    if not self.__namespace:
+      return self.__app_id
+    else:
+      return self.__app_id + _NAMESPACE_SEPARATOR + self.__namespace
+
+  def app_id(self):
+    """Returns this AppId portion of this AppIdNamespace.
+    """
+    return self.__app_id;
+
+  def namespace(self):
+    """Returns this namespace portion of this AppIdNamespace.
+    """
+    return self.__namespace;
+
+
+def PartitionString(value, separator):
+  """Equivalent to python2.5 str.partition()
+     TODO(gmariani) use str.partition() when python 2.5 is adopted.
+
+  Args:
+    value: String to be partitioned
+    separator: Separator string
+  """
+  index = value.find(separator);
+  if index == -1:
+    return (value, '', value[0:0]);
+  else:
+    return (value[0:index], separator, value[index+len(separator):len(value)])
+
+
+def parse_app_id_namespace(app_id_namespace):
+  """
+  An app_id_namespace string is valid if it's not empty, and contains
+  at most one namespace separator ('!').  Also, an app_id_namespace
+  with an empty namespace must not contain a namespace separator.
+
+  Args:
+    app_id_namespace: an encoded app_id_namespace.
+  Raises exception if format of app_id_namespace is invalid.
+  """
+  if not app_id_namespace:
+    raise datastore_errors.BadArgumentError(
+        'app_id_namespace must be non empty')
+  parts = PartitionString(app_id_namespace, _NAMESPACE_SEPARATOR)
+  if parts[1] == _NAMESPACE_SEPARATOR:
+    if not parts[2]:
+      raise datastore_errors.BadArgumentError(
+        'app_id_namespace must not contain a "%s" if the namespace is empty' %
+        _NAMESPACE_SEPARATOR)
+  if parts[2]:
+    return AppIdNamespace(parts[0], parts[2])
+  return AppIdNamespace(parts[0], None)
+
+def ResolveAppIdNamespace(
+    app_id=None, namespace=None, app_id_namespace=None):
+  """Validate an app id/namespace and substitute default values.
+
+  If the argument is None, $APPLICATION_ID!$NAMESPACE is substituted.
+
+  Args:
+    app_id: The app id argument value to be validated.
+    namespace: The namespace argument value to be validated.
+    app_id_namespace: An AppId/Namespace pair
+
+  Returns:
+    An AppIdNamespace object initialized with AppId and Namespace.
+
+  Raises:
+    BadArgumentError if the value is empty or not a string.
+  """
+  if app_id_namespace is None:
+    if app_id is None:
+      app_id = os.environ.get('APPLICATION_ID', '')
+    if namespace is None:
+      namespace = namespace_manager.get_request_namespace();
+  else:
+    if not app_id is None:
+      raise datastore_errors.BadArgumentError(
+          'app_id is overspecified.  Cannot define app_id_namespace and app_id')
+    if not namespace is None:
+      raise datastore_errors.BadArgumentError(
+          'namespace is overspecified.  ' +
+          'Cannot define app_id_namespace and namespace')
+    return parse_app_id_namespace(app_id_namespace)
+
+  return AppIdNamespace(app_id, namespace)
+
+
 class Key(object):
   """The primary key for a datastore entity.
 
@@ -172,6 +354,26 @@
     else:
       self.__reference = entity_pb.Reference()
 
+  def to_path(self):
+    """Construct the "path" of this key as a list.
+
+    Returns:
+      A list [kind_1, id_or_name_1, ..., kind_n, id_or_name_n] of the key path.
+
+    Raises:
+      datastore_errors.BadKeyError if this key does not have a valid path.
+    """
+    path = []
+    for path_element in self.__reference.path().element_list():
+      path.append(path_element.type().decode('utf-8'))
+      if path_element.has_name():
+        path.append(path_element.name().decode('utf-8'))
+      elif path_element.has_id():
+        path.append(path_element.id())
+      else:
+        raise datastore_errors.BadKeyError('Incomplete key found in to_path')
+    return path
+
   @staticmethod
   def from_path(*args, **kwds):
     """Static method to construct a Key out of a "path" (kind, id or name, ...).
@@ -202,7 +404,10 @@
       BadKeyError if the parent key is incomplete.
     """
     parent = kwds.pop('parent', None)
-    _app = ResolveAppId(kwds.pop('_app', None))
+    _app_id_namespace_obj = ResolveAppIdNamespace(
+        kwds.pop('_app', None),
+        kwds.pop('_namespace', None),
+        kwds.pop('_app_id_namespace', None))
 
     if kwds:
       raise datastore_errors.BadArgumentError(
@@ -221,17 +426,18 @@
       if not parent.has_id_or_name():
         raise datastore_errors.BadKeyError(
             'The parent Key is incomplete.')
-      if _app != parent.app():
+      if _app_id_namespace_obj != parent.app_id_namespace():
         raise datastore_errors.BadArgumentError(
-            'The _app argument (%r) should match parent.app() (%s)' %
-            (_app, parent.app()))
+            'The app_id/namespace arguments (%r) should match ' +
+            'parent.app_id_namespace().to_encoded() (%s)' %
+            (_app_id_namespace_obj, parent.app_id_namespace()))
 
     key = Key()
     ref = key.__reference
     if parent is not None:
       ref.CopyFrom(parent.__reference)
     else:
-      ref.set_app(_app)
+      ref.set_app(_app_id_namespace_obj.to_encoded())
 
     path = ref.mutable_path()
     for i in xrange(0, len(args), 2):
@@ -248,9 +454,6 @@
         elem.set_id(id_or_name)
       elif isinstance(id_or_name, basestring):
         ValidateString(id_or_name, 'name')
-        if id_or_name and id_or_name[0] in string.digits:
-          raise datastore_errors.BadArgumentError(
-            'Names may not begin with a digit; received %s.' % id_or_name)
         elem.set_name(id_or_name.encode('utf-8'))
       else:
         raise datastore_errors.BadArgumentError(
@@ -263,7 +466,21 @@
   def app(self):
     """Returns this entity's app id, a string."""
     if self.__reference.app():
-      return self.__reference.app().decode('utf-8')
+      return self.app_id_namespace().app_id().decode('utf-8')
+    else:
+      return None
+
+  def namespace(self):
+    """Returns this entity's app id, a string."""
+    if self.__reference.app():
+      return self.app_id_namespace().namespace().decode('utf-8')
+    else:
+      return None
+
+  def app_id_namespace(self):
+    """Returns this entity's app id/namespace, an appIdNamespace object."""
+    if self.__reference.app():
+      return parse_app_id_namespace(self.__reference.app())
     else:
       return None
 
@@ -339,11 +556,13 @@
       raise datastore_errors.BadKeyError(
         'ToTagUri() called for an entity with an incomplete key.')
 
-    return u'tag:%s.%s,%s:%s[%s]' % (saxutils.escape(self.app()),
-                                     os.environ['AUTH_DOMAIN'],
-                                     datetime.date.today().isoformat(),
-                                     saxutils.escape(self.kind()),
-                                     saxutils.escape(str(self)))
+    return u'tag:%s.%s,%s:%s[%s]' % (
+        saxutils.escape(self.app_id_namespace().to_encoded()),
+        os.environ['AUTH_DOMAIN'],
+        datetime.date.today().isoformat(),
+        saxutils.escape(self.kind()),
+        saxutils.escape(str(self)))
+
   ToXml = ToTagUri
 
   def entity_group(self):
@@ -436,7 +655,7 @@
       else:
         args.append(repr(elem.id()))
 
-    args.append('_app=%r' % self.__reference.app().decode('utf-8'))
+    args.append('_app_id_namespace=%r' % self.__reference.app().decode('utf-8'))
     return u'datastore_types.Key.from_path(%s)' % ', '.join(args)
 
   def __cmp__(self, other):
@@ -459,25 +678,29 @@
     self_args = []
     other_args = []
 
-    self_args.append(self.__reference.app().decode('utf-8'))
-    other_args.append(other.__reference.app().decode('utf-8'))
+    self_args.append(self.__reference.app())
+    other_args.append(other.__reference.app())
 
     for elem in self.__reference.path().element_list():
-      self_args.append(repr(elem.type()))
+      self_args.append(elem.type())
       if elem.has_name():
-        self_args.append(repr(elem.name().decode('utf-8')))
+        self_args.append(elem.name())
       else:
         self_args.append(elem.id())
 
     for elem in other.__reference.path().element_list():
-      other_args.append(repr(elem.type()))
+      other_args.append(elem.type())
       if elem.has_name():
-        other_args.append(repr(elem.name().decode('utf-8')))
+        other_args.append(elem.name())
       else:
         other_args.append(elem.id())
 
-    result = cmp(self_args, other_args)
-    return result
+    for self_component, other_component in zip(self_args, other_args):
+      comparison = cmp(self_component, other_component)
+      if comparison != 0:
+        return comparison
+
+    return cmp(len(self_args), len(other_args))
 
   def __hash__(self):
     """Returns a 32-bit integer hash of this key.
@@ -698,6 +921,7 @@
       except datastore_errors.BadValueError:
         return NotImplemented
 
+
     return cmp((self.address, self.protocol),
                (other.address, other.protocol))
 
@@ -900,6 +1124,63 @@
     return saxutils.escape(encoded)
 
 
+class BlobKey(object):
+  """Key used to identify a blob in Blobstore.
+
+  This object wraps a string that gets used internally by the Blobstore API
+  to identify application blobs.  The BlobKey corresponds to the entity name
+  of the underlying BlobReference entity.  The structure of the key is:
+
+    _<blob-key>
+
+  This class is exposed in the API in both google.appengine.ext.db and
+  google.appengine.ext.blobstore.
+  """
+
+  def __init__(self, blob_key):
+    """Constructor.
+
+    Used to convert a string to a BlobKey.  Normally used internally by
+    Blobstore API.
+
+    Args:
+      blob_key:  Key name of BlobReference that this key belongs to.
+    """
+    self.__blob_key = blob_key
+
+  def __str__(self):
+    """Convert to string."""
+    return self.__blob_key
+
+  def __repr__(self):
+    """Returns an eval()able string representation of this key.
+
+    Returns a Python string of the form 'datastore_types.BlobKey(...)'
+    that can be used to recreate this key.
+
+    Returns:
+      string
+    """
+    s = type(self).__module__
+    return '%s.%s(%r)' % (type(self).__module__,
+                       type(self).__name__,
+                       self.__blob_key)
+
+  def __cmp__(self, other):
+    if type(other) is type(self):
+      return cmp(str(self), str(other))
+    elif isinstance(other, basestring):
+      return cmp(self.__blob_key, other)
+    else:
+      return NotImplemented
+
+  def __hash__(self):
+    return hash(self.__blob_key)
+
+  def ToXml(self):
+    return str(self)
+
+
 _PROPERTY_MEANINGS = {
 
 
@@ -916,6 +1197,7 @@
   PhoneNumber:       entity_pb.Property.GD_PHONENUMBER,
   PostalAddress:     entity_pb.Property.GD_POSTALADDRESS,
   Rating:            entity_pb.Property.GD_RATING,
+  BlobKey:           entity_pb.Property.BLOBKEY,
 }
 
 _PROPERTY_TYPES = frozenset([
@@ -940,6 +1222,7 @@
   type(None),
   unicode,
   users.User,
+  BlobKey,
 ])
 
 _RAW_PROPERTY_TYPES = (Blob, Text)
@@ -1043,6 +1326,7 @@
   type(None): ValidatePropertyNothing,
   unicode: ValidatePropertyString,
   users.User: ValidatePropertyNothing,
+  BlobKey: ValidatePropertyString,
 }
 
 assert set(_VALIDATE_PROPERTY_VALUES.iterkeys()) == _PROPERTY_TYPES
@@ -1222,6 +1506,7 @@
   """
   pbvalue.set_doublevalue(value)
 
+
 _PACK_PROPERTY_VALUES = {
   Blob: PackBlob,
   ByteString: PackBlob,
@@ -1244,6 +1529,7 @@
   type(None): lambda name, value, pbvalue: None,
   unicode: PackString,
   users.User: PackUser,
+  BlobKey: PackString,
 }
 
 assert set(_PACK_PROPERTY_VALUES.iterkeys()) == _PROPERTY_TYPES
@@ -1331,7 +1617,6 @@
   entity_pb.Property.ATOM_CATEGORY:     Category,
   entity_pb.Property.ATOM_LINK:         Link,
   entity_pb.Property.GD_EMAIL:          Email,
-  entity_pb.Property.GEORSS_POINT:      lambda coords: GeoPt(*coords),
   entity_pb.Property.GD_IM:             IM,
   entity_pb.Property.GD_PHONENUMBER:    PhoneNumber,
   entity_pb.Property.GD_POSTALADDRESS:  PostalAddress,
@@ -1339,6 +1624,7 @@
   entity_pb.Property.BLOB:              Blob,
   entity_pb.Property.BYTESTRING:        ByteString,
   entity_pb.Property.TEXT:              Text,
+  entity_pb.Property.BLOBKEY:           BlobKey,
 }
 
 
@@ -1368,7 +1654,7 @@
   elif pbval.has_referencevalue():
     value = FromReferenceProperty(pbval)
   elif pbval.has_pointvalue():
-    value = (pbval.pointvalue().x(), pbval.pointvalue().y())
+    value = GeoPt(pbval.pointvalue().x(), pbval.pointvalue().y())
   elif pbval.has_uservalue():
     email = unicode(pbval.uservalue().email().decode('utf-8'))
     auth_domain = unicode(pbval.uservalue().auth_domain().decode('utf-8'))
@@ -1381,7 +1667,7 @@
     value = None
 
   try:
-    if pb.has_meaning():
+    if pb.has_meaning() and pb.meaning() in _PROPERTY_CONVERSIONS:
       conversion = _PROPERTY_CONVERSIONS[meaning]
       value = conversion(value)
   except (KeyError, ValueError, IndexError, TypeError, AttributeError), msg:
@@ -1437,6 +1723,7 @@
     'gd:phonenumber':   PhoneNumber,
     'gd:postaladdress': PostalAddress,
     'gd:rating':        Rating,
+    'blobkey':          BlobKey,
     }
 
 
--- a/thirdparty/google_appengine/google/appengine/api/images/images_service_pb.py	Mon Sep 07 20:26:39 2009 +0200
+++ b/thirdparty/google_appengine/google/appengine/api/images/images_service_pb.py	Mon Sep 07 20:27:37 2009 +0200
@@ -80,13 +80,17 @@
     return res
 
 
-  _TEXT = (
-   "ErrorCode",
-  )
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
 
-  _TYPES = (
-   ProtocolBuffer.Encoder.NUMERIC,
-  )
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+  }, 0)
+
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+  }, 0, ProtocolBuffer.Encoder.MAX_TYPE)
 
   _STYLE = """"""
   _STYLE_CONTENT_TYPE = """"""
@@ -150,13 +154,17 @@
     return res
 
 
-  _TEXT = (
-   "ErrorCode",
-  )
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
 
-  _TYPES = (
-   ProtocolBuffer.Encoder.NUMERIC,
-  )
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+  }, 0)
+
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+  }, 0, ProtocolBuffer.Encoder.MAX_TYPE)
 
   _STYLE = """"""
   _STYLE_CONTENT_TYPE = """"""
@@ -466,6 +474,10 @@
     if self.has_autolevels_: res+=prefix+("autolevels: %s\n" % self.DebugFormatBool(self.autolevels_))
     return res
 
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
   kwidth = 1
   kheight = 2
   krotate = 3
@@ -477,43 +489,33 @@
   kcrop_bottom_y = 9
   kautolevels = 10
 
-  _TEXT = (
-   "ErrorCode",
-   "width",
-   "height",
-   "rotate",
-   "horizontal_flip",
-   "vertical_flip",
-   "crop_left_x",
-   "crop_top_y",
-   "crop_right_x",
-   "crop_bottom_y",
-   "autolevels",
-  )
-
-  _TYPES = (
-   ProtocolBuffer.Encoder.NUMERIC,
-   ProtocolBuffer.Encoder.NUMERIC,
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    1: "width",
+    2: "height",
+    3: "rotate",
+    4: "horizontal_flip",
+    5: "vertical_flip",
+    6: "crop_left_x",
+    7: "crop_top_y",
+    8: "crop_right_x",
+    9: "crop_bottom_y",
+    10: "autolevels",
+  }, 10)
 
-   ProtocolBuffer.Encoder.NUMERIC,
-
-   ProtocolBuffer.Encoder.NUMERIC,
-
-   ProtocolBuffer.Encoder.NUMERIC,
-
-   ProtocolBuffer.Encoder.NUMERIC,
-
-   ProtocolBuffer.Encoder.FLOAT,
-
-   ProtocolBuffer.Encoder.FLOAT,
-
-   ProtocolBuffer.Encoder.FLOAT,
-
-   ProtocolBuffer.Encoder.FLOAT,
-
-   ProtocolBuffer.Encoder.NUMERIC,
-
-  )
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    1: ProtocolBuffer.Encoder.NUMERIC,
+    2: ProtocolBuffer.Encoder.NUMERIC,
+    3: ProtocolBuffer.Encoder.NUMERIC,
+    4: ProtocolBuffer.Encoder.NUMERIC,
+    5: ProtocolBuffer.Encoder.NUMERIC,
+    6: ProtocolBuffer.Encoder.FLOAT,
+    7: ProtocolBuffer.Encoder.FLOAT,
+    8: ProtocolBuffer.Encoder.FLOAT,
+    9: ProtocolBuffer.Encoder.FLOAT,
+    10: ProtocolBuffer.Encoder.NUMERIC,
+  }, 10, ProtocolBuffer.Encoder.MAX_TYPE)
 
   _STYLE = """"""
   _STYLE_CONTENT_TYPE = """"""
@@ -583,18 +585,21 @@
     if self.has_content_: res+=prefix+("content: %s\n" % self.DebugFormatString(self.content_))
     return res
 
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
   kcontent = 1
 
-  _TEXT = (
-   "ErrorCode",
-   "content",
-  )
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    1: "content",
+  }, 1)
 
-  _TYPES = (
-   ProtocolBuffer.Encoder.NUMERIC,
-   ProtocolBuffer.Encoder.STRING,
-
-  )
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    1: ProtocolBuffer.Encoder.STRING,
+  }, 1, ProtocolBuffer.Encoder.MAX_TYPE)
 
   _STYLE = """"""
   _STYLE_CONTENT_TYPE = """"""
@@ -673,18 +678,21 @@
     if self.has_mime_type_: res+=prefix+("mime_type: %s\n" % self.DebugFormatInt32(self.mime_type_))
     return res
 
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
   kmime_type = 1
 
-  _TEXT = (
-   "ErrorCode",
-   "mime_type",
-  )
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    1: "mime_type",
+  }, 1)
 
-  _TYPES = (
-   ProtocolBuffer.Encoder.NUMERIC,
-   ProtocolBuffer.Encoder.NUMERIC,
-
-  )
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    1: ProtocolBuffer.Encoder.NUMERIC,
+  }, 1, ProtocolBuffer.Encoder.MAX_TYPE)
 
   _STYLE = """"""
   _STYLE_CONTENT_TYPE = """"""
@@ -834,26 +842,27 @@
       res+=prefix+">\n"
     return res
 
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
   kimage = 1
   ktransform = 2
   koutput = 3
 
-  _TEXT = (
-   "ErrorCode",
-   "image",
-   "transform",
-   "output",
-  )
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    1: "image",
+    2: "transform",
+    3: "output",
+  }, 3)
 
-  _TYPES = (
-   ProtocolBuffer.Encoder.NUMERIC,
-   ProtocolBuffer.Encoder.STRING,
-
-   ProtocolBuffer.Encoder.STRING,
-
-   ProtocolBuffer.Encoder.STRING,
-
-  )
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    1: ProtocolBuffer.Encoder.STRING,
+    2: ProtocolBuffer.Encoder.STRING,
+    3: ProtocolBuffer.Encoder.STRING,
+  }, 3, ProtocolBuffer.Encoder.MAX_TYPE)
 
   _STYLE = """"""
   _STYLE_CONTENT_TYPE = """"""
@@ -926,18 +935,21 @@
       res+=prefix+">\n"
     return res
 
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
   kimage = 1
 
-  _TEXT = (
-   "ErrorCode",
-   "image",
-  )
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    1: "image",
+  }, 1)
 
-  _TYPES = (
-   ProtocolBuffer.Encoder.NUMERIC,
-   ProtocolBuffer.Encoder.STRING,
-
-  )
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    1: ProtocolBuffer.Encoder.STRING,
+  }, 1, ProtocolBuffer.Encoder.MAX_TYPE)
 
   _STYLE = """"""
   _STYLE_CONTENT_TYPE = """"""
@@ -1152,34 +1164,33 @@
     if self.has_anchor_: res+=prefix+("anchor: %s\n" % self.DebugFormatInt32(self.anchor_))
     return res
 
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
   ksource_index = 1
   kx_offset = 2
   ky_offset = 3
   kopacity = 4
   kanchor = 5
 
-  _TEXT = (
-   "ErrorCode",
-   "source_index",
-   "x_offset",
-   "y_offset",
-   "opacity",
-   "anchor",
-  )
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    1: "source_index",
+    2: "x_offset",
+    3: "y_offset",
+    4: "opacity",
+    5: "anchor",
+  }, 5)
 
-  _TYPES = (
-   ProtocolBuffer.Encoder.NUMERIC,
-   ProtocolBuffer.Encoder.NUMERIC,
-
-   ProtocolBuffer.Encoder.NUMERIC,
-
-   ProtocolBuffer.Encoder.NUMERIC,
-
-   ProtocolBuffer.Encoder.FLOAT,
-
-   ProtocolBuffer.Encoder.NUMERIC,
-
-  )
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    1: ProtocolBuffer.Encoder.NUMERIC,
+    2: ProtocolBuffer.Encoder.NUMERIC,
+    3: ProtocolBuffer.Encoder.NUMERIC,
+    4: ProtocolBuffer.Encoder.FLOAT,
+    5: ProtocolBuffer.Encoder.NUMERIC,
+  }, 5, ProtocolBuffer.Encoder.MAX_TYPE)
 
   _STYLE = """"""
   _STYLE_CONTENT_TYPE = """"""
@@ -1339,30 +1350,30 @@
     if self.has_color_: res+=prefix+("color: %s\n" % self.DebugFormatInt32(self.color_))
     return res
 
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
   kwidth = 1
   kheight = 2
   koutput = 3
   kcolor = 4
 
-  _TEXT = (
-   "ErrorCode",
-   "width",
-   "height",
-   "output",
-   "color",
-  )
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    1: "width",
+    2: "height",
+    3: "output",
+    4: "color",
+  }, 4)
 
-  _TYPES = (
-   ProtocolBuffer.Encoder.NUMERIC,
-   ProtocolBuffer.Encoder.NUMERIC,
-
-   ProtocolBuffer.Encoder.NUMERIC,
-
-   ProtocolBuffer.Encoder.STRING,
-
-   ProtocolBuffer.Encoder.NUMERIC,
-
-  )
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    1: ProtocolBuffer.Encoder.NUMERIC,
+    2: ProtocolBuffer.Encoder.NUMERIC,
+    3: ProtocolBuffer.Encoder.STRING,
+    4: ProtocolBuffer.Encoder.NUMERIC,
+  }, 4, ProtocolBuffer.Encoder.MAX_TYPE)
 
   _STYLE = """"""
   _STYLE_CONTENT_TYPE = """"""
@@ -1523,26 +1534,27 @@
       res+=prefix+">\n"
     return res
 
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
   kimage = 1
   koptions = 2
   kcanvas = 3
 
-  _TEXT = (
-   "ErrorCode",
-   "image",
-   "options",
-   "canvas",
-  )
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    1: "image",
+    2: "options",
+    3: "canvas",
+  }, 3)
 
-  _TYPES = (
-   ProtocolBuffer.Encoder.NUMERIC,
-   ProtocolBuffer.Encoder.STRING,
-
-   ProtocolBuffer.Encoder.STRING,
-
-   ProtocolBuffer.Encoder.STRING,
-
-  )
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    1: ProtocolBuffer.Encoder.STRING,
+    2: ProtocolBuffer.Encoder.STRING,
+    3: ProtocolBuffer.Encoder.STRING,
+  }, 3, ProtocolBuffer.Encoder.MAX_TYPE)
 
   _STYLE = """"""
   _STYLE_CONTENT_TYPE = """"""
@@ -1615,18 +1627,21 @@
       res+=prefix+">\n"
     return res
 
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
   kimage = 1
 
-  _TEXT = (
-   "ErrorCode",
-   "image",
-  )
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    1: "image",
+  }, 1)
 
-  _TYPES = (
-   ProtocolBuffer.Encoder.NUMERIC,
-   ProtocolBuffer.Encoder.STRING,
-
-  )
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    1: ProtocolBuffer.Encoder.STRING,
+  }, 1, ProtocolBuffer.Encoder.MAX_TYPE)
 
   _STYLE = """"""
   _STYLE_CONTENT_TYPE = """"""
@@ -1699,18 +1714,21 @@
       res+=prefix+">\n"
     return res
 
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
   kimage = 1
 
-  _TEXT = (
-   "ErrorCode",
-   "image",
-  )
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    1: "image",
+  }, 1)
 
-  _TYPES = (
-   ProtocolBuffer.Encoder.NUMERIC,
-   ProtocolBuffer.Encoder.STRING,
-
-  )
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    1: ProtocolBuffer.Encoder.STRING,
+  }, 1, ProtocolBuffer.Encoder.MAX_TYPE)
 
   _STYLE = """"""
   _STYLE_CONTENT_TYPE = """"""
@@ -1855,26 +1873,27 @@
       cnt+=1
     return res
 
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
   kred = 1
   kgreen = 2
   kblue = 3
 
-  _TEXT = (
-   "ErrorCode",
-   "red",
-   "green",
-   "blue",
-  )
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    1: "red",
+    2: "green",
+    3: "blue",
+  }, 3)
 
-  _TYPES = (
-   ProtocolBuffer.Encoder.NUMERIC,
-   ProtocolBuffer.Encoder.NUMERIC,
-
-   ProtocolBuffer.Encoder.NUMERIC,
-
-   ProtocolBuffer.Encoder.NUMERIC,
-
-  )
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    1: ProtocolBuffer.Encoder.NUMERIC,
+    2: ProtocolBuffer.Encoder.NUMERIC,
+    3: ProtocolBuffer.Encoder.NUMERIC,
+  }, 3, ProtocolBuffer.Encoder.MAX_TYPE)
 
   _STYLE = """"""
   _STYLE_CONTENT_TYPE = """"""
@@ -1947,18 +1966,21 @@
       res+=prefix+">\n"
     return res
 
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
   khistogram = 1
 
-  _TEXT = (
-   "ErrorCode",
-   "histogram",
-  )
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    1: "histogram",
+  }, 1)
 
-  _TYPES = (
-   ProtocolBuffer.Encoder.NUMERIC,
-   ProtocolBuffer.Encoder.STRING,
-
-  )
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    1: ProtocolBuffer.Encoder.STRING,
+  }, 1, ProtocolBuffer.Encoder.MAX_TYPE)
 
   _STYLE = """"""
   _STYLE_CONTENT_TYPE = """"""
--- a/thirdparty/google_appengine/google/appengine/api/labs/taskqueue/taskqueue.py	Mon Sep 07 20:26:39 2009 +0200
+++ b/thirdparty/google_appengine/google/appengine/api/labs/taskqueue/taskqueue.py	Mon Sep 07 20:27:37 2009 +0200
@@ -480,19 +480,9 @@
     """
     return self.__enqueued
 
-  def add(self, queue_name=_DEFAULT_QUEUE):
-    """Adds this Task to a queue.
-
-    Args:
-      queue_name: Name of the queue to add this Task to. (optional)
-
-    Returns:
-      This Task itself.
-
-    Raises:
-      BadTaskStateError if this task has already been enqueued.
-    """
-    return Queue(queue_name).add(self)
+  def add(self, queue_name=_DEFAULT_QUEUE, transactional=True):
+    """Adds this Task to a queue. See Queue.add."""
+    return Queue(queue_name).add(self, transactional=transactional)
 
 
 class Queue(object):
@@ -514,11 +504,13 @@
     self.__name = name
     self.__url = '%s/%s' % (_DEFAULT_QUEUE_PATH, self.__name)
 
-  def add(self, task):
+  def add(self, task, transactional=True):
     """Adds a Task to this Queue.
 
     Args:
       task: The Task to add.
+      transactional: If false adds the task to a queue irrespectively to the
+        enclosing transaction success or failure. (optional)
 
     Returns:
       The Task that was supplied to this method.
@@ -555,6 +547,10 @@
       header.set_key(key)
       header.set_value(value)
 
+    if transactional:
+      from google.appengine.api import datastore
+      datastore._MaybeSetupTransaction(request, [])
+
     call_tuple = ('taskqueue', 'Add', request, response)
     apiproxy_stub_map.apiproxy.GetPreCallHooks().Call(*call_tuple)
     try:
--- a/thirdparty/google_appengine/google/appengine/api/labs/taskqueue/taskqueue_service_pb.py	Mon Sep 07 20:26:39 2009 +0200
+++ b/thirdparty/google_appengine/google/appengine/api/labs/taskqueue/taskqueue_service_pb.py	Mon Sep 07 20:27:37 2009 +0200
@@ -22,6 +22,7 @@
 __pychecker__ = """maxreturns=0 maxbranches=0 no-callinit
                    unusednames=printElemNumber,debug_strs no-special"""
 
+from google.appengine.datastore.datastore_v3_pb import *
 class TaskQueueServiceError(ProtocolBuffer.ProtocolMessage):
 
   OK           =    0
@@ -37,6 +38,7 @@
   TASK_ALREADY_EXISTS =   10
   TOMBSTONED_TASK =   11
   INVALID_ETA  =   12
+  INVALID_REQUEST =   13
 
   _ErrorCode_NAMES = {
     0: "OK",
@@ -52,6 +54,7 @@
     10: "TASK_ALREADY_EXISTS",
     11: "TOMBSTONED_TASK",
     12: "INVALID_ETA",
+    13: "INVALID_REQUEST",
   }
 
   def ErrorCode_Name(cls, x): return cls._ErrorCode_NAMES.get(x, "")
@@ -96,13 +99,17 @@
     return res
 
 
-  _TEXT = (
-   "ErrorCode",
-  )
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
 
-  _TYPES = (
-   ProtocolBuffer.Encoder.NUMERIC,
-  )
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+  }, 0)
+
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+  }, 0, ProtocolBuffer.Encoder.MAX_TYPE)
 
   _STYLE = """"""
   _STYLE_CONTENT_TYPE = """"""
@@ -234,9 +241,12 @@
   url_ = ""
   has_body_ = 0
   body_ = ""
+  has_transaction_ = 0
+  transaction_ = None
 
   def __init__(self, contents=None):
     self.header_ = []
+    self.lazy_init_lock_ = thread.allocate_lock()
     if contents is not None: self.MergeFromString(contents)
 
   def queue_name(self): return self.queue_name_
@@ -333,6 +343,24 @@
 
   def has_body(self): return self.has_body_
 
+  def transaction(self):
+    if self.transaction_ is None:
+      self.lazy_init_lock_.acquire()
+      try:
+        if self.transaction_ is None: self.transaction_ = Transaction()
+      finally:
+        self.lazy_init_lock_.release()
+    return self.transaction_
+
+  def mutable_transaction(self): self.has_transaction_ = 1; return self.transaction()
+
+  def clear_transaction(self):
+    if self.has_transaction_:
+      self.has_transaction_ = 0;
+      if self.transaction_ is not None: self.transaction_.Clear()
+
+  def has_transaction(self): return self.has_transaction_
+
 
   def MergeFrom(self, x):
     assert x is not self
@@ -343,6 +371,7 @@
     if (x.has_url()): self.set_url(x.url())
     for i in xrange(x.header_size()): self.add_header().CopyFrom(x.header(i))
     if (x.has_body()): self.set_body(x.body())
+    if (x.has_transaction()): self.mutable_transaction().MergeFrom(x.transaction())
 
   def Equals(self, x):
     if x is self: return 1
@@ -361,6 +390,8 @@
       if e1 != e2: return 0
     if self.has_body_ != x.has_body_: return 0
     if self.has_body_ and self.body_ != x.body_: return 0
+    if self.has_transaction_ != x.has_transaction_: return 0
+    if self.has_transaction_ and self.transaction_ != x.transaction_: return 0
     return 1
 
   def IsInitialized(self, debug_strs=None):
@@ -383,6 +414,7 @@
         debug_strs.append('Required field: url not set.')
     for p in self.header_:
       if not p.IsInitialized(debug_strs): initialized=0
+    if (self.has_transaction_ and not self.transaction_.IsInitialized(debug_strs)): initialized = 0
     return initialized
 
   def ByteSize(self):
@@ -395,6 +427,7 @@
     n += 2 * len(self.header_)
     for i in xrange(len(self.header_)): n += self.header_[i].ByteSize()
     if (self.has_body_): n += 1 + self.lengthString(len(self.body_))
+    if (self.has_transaction_): n += 1 + self.lengthString(self.transaction_.ByteSize())
     return n + 4
 
   def Clear(self):
@@ -405,6 +438,7 @@
     self.clear_url()
     self.clear_header()
     self.clear_body()
+    self.clear_transaction()
 
   def OutputUnchecked(self, out):
     out.putVarInt32(10)
@@ -425,6 +459,10 @@
     if (self.has_body_):
       out.putVarInt32(74)
       out.putPrefixedString(self.body_)
+    if (self.has_transaction_):
+      out.putVarInt32(82)
+      out.putVarInt32(self.transaction_.ByteSize())
+      self.transaction_.OutputUnchecked(out)
 
   def TryMerge(self, d):
     while d.avail() > 0:
@@ -450,6 +488,12 @@
       if tt == 74:
         self.set_body(d.getPrefixedString())
         continue
+      if tt == 82:
+        length = d.getVarInt32()
+        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
+        d.skip(length)
+        self.mutable_transaction().TryMerge(tmp)
+        continue
       if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
       d.skipData(tt)
 
@@ -470,8 +514,16 @@
       res+=prefix+"}\n"
       cnt+=1
     if self.has_body_: res+=prefix+("body: %s\n" % self.DebugFormatString(self.body_))
+    if self.has_transaction_:
+      res+=prefix+"transaction <\n"
+      res+=self.transaction_.__str__(prefix + "  ", printElemNumber)
+      res+=prefix+">\n"
     return res
 
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
   kqueue_name = 1
   ktask_name = 2
   keta_usec = 3
@@ -481,41 +533,35 @@
   kHeaderkey = 7
   kHeadervalue = 8
   kbody = 9
-
-  _TEXT = (
-   "ErrorCode",
-   "queue_name",
-   "task_name",
-   "eta_usec",
-   "url",
-   "method",
-   "Header",
-   "key",
-   "value",
-   "body",
-  )
-
-  _TYPES = (
-   ProtocolBuffer.Encoder.NUMERIC,
-   ProtocolBuffer.Encoder.STRING,
+  ktransaction = 10
 
-   ProtocolBuffer.Encoder.STRING,
-
-   ProtocolBuffer.Encoder.NUMERIC,
-
-   ProtocolBuffer.Encoder.STRING,
-
-   ProtocolBuffer.Encoder.NUMERIC,
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    1: "queue_name",
+    2: "task_name",
+    3: "eta_usec",
+    4: "url",
+    5: "method",
+    6: "Header",
+    7: "key",
+    8: "value",
+    9: "body",
+    10: "transaction",
+  }, 10)
 
-   ProtocolBuffer.Encoder.STARTGROUP,
-
-   ProtocolBuffer.Encoder.STRING,
-
-   ProtocolBuffer.Encoder.STRING,
-
-   ProtocolBuffer.Encoder.STRING,
-
-  )
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    1: ProtocolBuffer.Encoder.STRING,
+    2: ProtocolBuffer.Encoder.STRING,
+    3: ProtocolBuffer.Encoder.NUMERIC,
+    4: ProtocolBuffer.Encoder.STRING,
+    5: ProtocolBuffer.Encoder.NUMERIC,
+    6: ProtocolBuffer.Encoder.STARTGROUP,
+    7: ProtocolBuffer.Encoder.STRING,
+    8: ProtocolBuffer.Encoder.STRING,
+    9: ProtocolBuffer.Encoder.STRING,
+    10: ProtocolBuffer.Encoder.STRING,
+  }, 10, ProtocolBuffer.Encoder.MAX_TYPE)
 
   _STYLE = """"""
   _STYLE_CONTENT_TYPE = """"""
@@ -582,18 +628,21 @@
     if self.has_chosen_task_name_: res+=prefix+("chosen_task_name: %s\n" % self.DebugFormatString(self.chosen_task_name_))
     return res
 
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
   kchosen_task_name = 1
 
-  _TEXT = (
-   "ErrorCode",
-   "chosen_task_name",
-  )
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    1: "chosen_task_name",
+  }, 1)
 
-  _TYPES = (
-   ProtocolBuffer.Encoder.NUMERIC,
-   ProtocolBuffer.Encoder.STRING,
-
-  )
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    1: ProtocolBuffer.Encoder.STRING,
+  }, 1, ProtocolBuffer.Encoder.MAX_TYPE)
 
   _STYLE = """"""
   _STYLE_CONTENT_TYPE = """"""
@@ -779,34 +828,33 @@
     if self.has_user_specified_rate_: res+=prefix+("user_specified_rate: %s\n" % self.DebugFormatString(self.user_specified_rate_))
     return res
 
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
   kapp_id = 1
   kqueue_name = 2
   kbucket_refill_per_second = 3
   kbucket_capacity = 4
   kuser_specified_rate = 5
 
-  _TEXT = (
-   "ErrorCode",
-   "app_id",
-   "queue_name",
-   "bucket_refill_per_second",
-   "bucket_capacity",
-   "user_specified_rate",
-  )
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    1: "app_id",
+    2: "queue_name",
+    3: "bucket_refill_per_second",
+    4: "bucket_capacity",
+    5: "user_specified_rate",
+  }, 5)
 
-  _TYPES = (
-   ProtocolBuffer.Encoder.NUMERIC,
-   ProtocolBuffer.Encoder.STRING,
-
-   ProtocolBuffer.Encoder.STRING,
-
-   ProtocolBuffer.Encoder.DOUBLE,
-
-   ProtocolBuffer.Encoder.NUMERIC,
-
-   ProtocolBuffer.Encoder.STRING,
-
-  )
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    1: ProtocolBuffer.Encoder.STRING,
+    2: ProtocolBuffer.Encoder.STRING,
+    3: ProtocolBuffer.Encoder.DOUBLE,
+    4: ProtocolBuffer.Encoder.NUMERIC,
+    5: ProtocolBuffer.Encoder.STRING,
+  }, 5, ProtocolBuffer.Encoder.MAX_TYPE)
 
   _STYLE = """"""
   _STYLE_CONTENT_TYPE = """"""
@@ -850,13 +898,17 @@
     return res
 
 
-  _TEXT = (
-   "ErrorCode",
-  )
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
 
-  _TYPES = (
-   ProtocolBuffer.Encoder.NUMERIC,
-  )
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+  }, 0)
+
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+  }, 0, ProtocolBuffer.Encoder.MAX_TYPE)
 
   _STYLE = """"""
   _STYLE_CONTENT_TYPE = """"""
@@ -956,22 +1008,24 @@
     if self.has_max_rows_: res+=prefix+("max_rows: %s\n" % self.DebugFormatInt32(self.max_rows_))
     return res
 
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
   kapp_id = 1
   kmax_rows = 2
 
-  _TEXT = (
-   "ErrorCode",
-   "app_id",
-   "max_rows",
-  )
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    1: "app_id",
+    2: "max_rows",
+  }, 2)
 
-  _TYPES = (
-   ProtocolBuffer.Encoder.NUMERIC,
-   ProtocolBuffer.Encoder.STRING,
-
-   ProtocolBuffer.Encoder.NUMERIC,
-
-  )
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    1: ProtocolBuffer.Encoder.STRING,
+    2: ProtocolBuffer.Encoder.NUMERIC,
+  }, 2, ProtocolBuffer.Encoder.MAX_TYPE)
 
   _STYLE = """"""
   _STYLE_CONTENT_TYPE = """"""
@@ -1204,34 +1258,33 @@
       cnt+=1
     return res
 
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
   kQueueGroup = 1
   kQueuequeue_name = 2
   kQueuebucket_refill_per_second = 3
   kQueuebucket_capacity = 4
   kQueueuser_specified_rate = 5
 
-  _TEXT = (
-   "ErrorCode",
-   "Queue",
-   "queue_name",
-   "bucket_refill_per_second",
-   "bucket_capacity",
-   "user_specified_rate",
-  )
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    1: "Queue",
+    2: "queue_name",
+    3: "bucket_refill_per_second",
+    4: "bucket_capacity",
+    5: "user_specified_rate",
+  }, 5)
 
-  _TYPES = (
-   ProtocolBuffer.Encoder.NUMERIC,
-   ProtocolBuffer.Encoder.STARTGROUP,
-
-   ProtocolBuffer.Encoder.STRING,
-
-   ProtocolBuffer.Encoder.DOUBLE,
-
-   ProtocolBuffer.Encoder.DOUBLE,
-
-   ProtocolBuffer.Encoder.STRING,
-
-  )
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    1: ProtocolBuffer.Encoder.STARTGROUP,
+    2: ProtocolBuffer.Encoder.STRING,
+    3: ProtocolBuffer.Encoder.DOUBLE,
+    4: ProtocolBuffer.Encoder.DOUBLE,
+    5: ProtocolBuffer.Encoder.STRING,
+  }, 5, ProtocolBuffer.Encoder.MAX_TYPE)
 
   _STYLE = """"""
   _STYLE_CONTENT_TYPE = """"""
@@ -1366,26 +1419,27 @@
     if self.has_max_num_tasks_: res+=prefix+("max_num_tasks: %s\n" % self.DebugFormatInt32(self.max_num_tasks_))
     return res
 
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
   kapp_id = 1
   kqueue_name = 2
   kmax_num_tasks = 3
 
-  _TEXT = (
-   "ErrorCode",
-   "app_id",
-   "queue_name",
-   "max_num_tasks",
-  )
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    1: "app_id",
+    2: "queue_name",
+    3: "max_num_tasks",
+  }, 3)
 
-  _TYPES = (
-   ProtocolBuffer.Encoder.NUMERIC,
-   ProtocolBuffer.Encoder.STRING,
-
-   ProtocolBuffer.Encoder.STRING,
-
-   ProtocolBuffer.Encoder.NUMERIC,
-
-  )
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    1: ProtocolBuffer.Encoder.STRING,
+    2: ProtocolBuffer.Encoder.STRING,
+    3: ProtocolBuffer.Encoder.NUMERIC,
+  }, 3, ProtocolBuffer.Encoder.MAX_TYPE)
 
   _STYLE = """"""
   _STYLE_CONTENT_TYPE = """"""
@@ -1563,26 +1617,27 @@
       cnt+=1
     return res
 
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
   kQueueStatsGroup = 1
   kQueueStatsnum_tasks = 2
   kQueueStatsoldest_eta_usec = 3
 
-  _TEXT = (
-   "ErrorCode",
-   "QueueStats",
-   "num_tasks",
-   "oldest_eta_usec",
-  )
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    1: "QueueStats",
+    2: "num_tasks",
+    3: "oldest_eta_usec",
+  }, 3)
 
-  _TYPES = (
-   ProtocolBuffer.Encoder.NUMERIC,
-   ProtocolBuffer.Encoder.STARTGROUP,
-
-   ProtocolBuffer.Encoder.NUMERIC,
-
-   ProtocolBuffer.Encoder.NUMERIC,
-
-  )
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    1: ProtocolBuffer.Encoder.STARTGROUP,
+    2: ProtocolBuffer.Encoder.NUMERIC,
+    3: ProtocolBuffer.Encoder.NUMERIC,
+  }, 3, ProtocolBuffer.Encoder.MAX_TYPE)
 
   _STYLE = """"""
   _STYLE_CONTENT_TYPE = """"""
--- a/thirdparty/google_appengine/google/appengine/api/labs/taskqueue/taskqueue_stub.py	Mon Sep 07 20:26:39 2009 +0200
+++ b/thirdparty/google_appengine/google/appengine/api/labs/taskqueue/taskqueue_stub.py	Mon Sep 07 20:27:37 2009 +0200
@@ -43,9 +43,20 @@
 
 DEFAULT_BUCKET_SIZE = 5
 
+MAX_ETA_DELTA_DAYS = 30
+
 
 def _ParseQueueYaml(unused_self, root_path):
-  """Load the queue.yaml file and parse it."""
+  """Loads the queue.yaml file and parses it.
+
+  Args:
+    unused_self: Allows this function to be bound to a class member. Not used.
+    root_path: Directory in which to look for queue.yaml / queue.yml.
+
+  Returns:
+    None if queue.yaml doesn't exist, otherwise a queueinfo.QueueEntry object
+    populated from the queue.yaml.
+  """
   if root_path is None:
     return None
   for queueyaml in ('queue.yaml', 'queue.yml'):
@@ -61,8 +72,16 @@
   return None
 
 
-def _CompareEta(a, b):
-  """Python sort comparator for task ETAs."""
+def _CompareTasksByEta(a, b):
+  """Python sort comparator for tasks by estimated time of arrival (ETA).
+
+  Args:
+    a: A taskqueue_service_pb.TaskQueueAddRequest.
+    b: A taskqueue_service_pb.TaskQueueAddRequest.
+
+  Returns:
+    Standard 1/0/-1 comparison result.
+  """
   if a.eta_usec() > b.eta_usec():
     return 1
   if a.eta_usec() < b.eta_usec():
@@ -106,29 +125,63 @@
         available.
     """
     super(TaskQueueServiceStub, self).__init__(service_name)
-    self.taskqueues = {}
-    self.next_task_id = 1
-    self.root_path = root_path
+    self._taskqueues = {}
+    self._next_task_id = 1
+    self._root_path = root_path
+
+  def _Dynamic_Add(self, request, response):
+    """Local implementation of the Add RPC in TaskQueueService.
+
+    Must adhere to the '_Dynamic_' naming convention for stubbing to work.
+    See taskqueue_service.proto for a full description of the RPC.
 
-  def _Dynamic_Add(self, request, unused_response):
-    if not self._ValidQueue(request.queue_name()):
+    Args:
+      request: A taskqueue_service_pb.TaskQueueAddRequest.
+      response: A taskqueue_service_pb.TaskQueueAddResponse.
+    """
+    if request.eta_usec() < 0:
+      raise apiproxy_errors.ApplicationError(
+          taskqueue_service_pb.TaskQueueServiceError.INVALID_ETA)
+
+    eta = datetime.datetime.utcfromtimestamp(request.eta_usec() / 1e6)
+    max_eta = (datetime.datetime.utcnow() +
+               datetime.timedelta(days=MAX_ETA_DELTA_DAYS))
+    if eta > max_eta:
+      raise apiproxy_errors.ApplicationError(
+          taskqueue_service_pb.TaskQueueServiceError.INVALID_ETA)
+
+    if not self._IsValidQueue(request.queue_name()):
       raise apiproxy_errors.ApplicationError(
           taskqueue_service_pb.TaskQueueServiceError.UNKNOWN_QUEUE)
-      return
 
     if not request.task_name():
-      request.set_task_name('task%d' % self.next_task_id)
-      self.next_task_id += 1
+      request.set_task_name('task%d' % self._next_task_id)
+      response.set_chosen_task_name(request.task_name())
+      self._next_task_id += 1
 
-    tasks = self.taskqueues.setdefault(request.queue_name(), [])
+    tasks = self._taskqueues.setdefault(request.queue_name(), [])
+    for task in tasks:
+      if task.task_name() == request.task_name():
+        raise apiproxy_errors.ApplicationError(
+            taskqueue_service_pb.TaskQueueServiceError.TASK_ALREADY_EXISTS)
     tasks.append(request)
-    tasks.sort(_CompareEta)
-    return
+    tasks.sort(_CompareTasksByEta)
+
+  def _IsValidQueue(self, queue_name):
+    """Determines whether a queue is valid, i.e. tasks can be added to it.
 
-  def _ValidQueue(self, queue_name):
+    Valid queues are the 'default' queue, plus any queues in the queue.yaml
+    file.
+
+    Args:
+      queue_name: the name of the queue to validate.
+
+    Returns:
+      True iff queue is valid.
+    """
     if queue_name == 'default':
       return True
-    queue_info = self.queue_yaml_parser(self.root_path)
+    queue_info = self.queue_yaml_parser(self._root_path)
     if queue_info and queue_info.queue:
       for entry in queue_info.queue:
         if entry.name == queue_name:
@@ -140,10 +193,16 @@
 
     Returns:
       A list of dictionaries, where each dictionary contains one queue's
-      attributes.
+      attributes. E.g.:
+        [{'name': 'some-queue',
+          'max_rate': '1/s',
+          'bucket_size': 5,
+          'oldest_task': '2009/02/02 05:37:42',
+          'eta_delta': '0:00:06.342511 ago',
+          'tasks_in_queue': 12}, ...]
     """
     queues = []
-    queue_info = self.queue_yaml_parser(self.root_path)
+    queue_info = self.queue_yaml_parser(self._root_path)
     has_default = False
     if queue_info and queue_info.queue:
       for entry in queue_info.queue:
@@ -158,7 +217,7 @@
         else:
           queue['bucket_size'] = DEFAULT_BUCKET_SIZE
 
-        tasks = self.taskqueues.setdefault(entry.name, [])
+        tasks = self._taskqueues.setdefault(entry.name, [])
         if tasks:
           queue['oldest_task'] = _FormatEta(tasks[0].eta_usec())
           queue['eta_delta'] = _EtaDelta(tasks[0].eta_usec())
@@ -173,7 +232,7 @@
       queue['max_rate'] = DEFAULT_RATE
       queue['bucket_size'] = DEFAULT_BUCKET_SIZE
 
-      tasks = self.taskqueues.get('default', [])
+      tasks = self._taskqueues.get('default', [])
       if tasks:
         queue['oldest_task'] = _FormatEta(tasks[0].eta_usec())
         queue['eta_delta'] = _EtaDelta(tasks[0].eta_usec())
@@ -190,9 +249,24 @@
 
     Returns:
       A list of dictionaries, where each dictionary contains one task's
-      attributes.
+      attributes. E.g.
+        [{'name': 'task-123',
+          'url': '/update',
+          'method': 'GET',
+          'eta': '2009/02/02 05:37:42',
+          'eta_delta': '0:00:06.342511 ago',
+          'body': '',
+          'headers': {'X-AppEngine-QueueName': 'update-queue',
+                      'X-AppEngine-TaskName': 'task-123',
+                      'X-AppEngine-TaskRetryCount': '0',
+                      'X-AppEngine-Development-Payload': '1',
+                      'Content-Length': 0,
+                      'Content-Type': 'application/octet-stream'}, ...]
+
+    Raises:
+      ValueError: A task request contains an unknown HTTP method type.
     """
-    tasks = self.taskqueues.get(queue_name, [])
+    tasks = self._taskqueues.get(queue_name, [])
     result_tasks = []
     for task_request in tasks:
       task = {}
@@ -200,16 +274,18 @@
       task['name'] = task_request.task_name()
       task['url'] = task_request.url()
       method = task_request.method()
-      if (method == taskqueue_service_pb.TaskQueueAddRequest.GET):
+      if method == taskqueue_service_pb.TaskQueueAddRequest.GET:
         task['method'] = 'GET'
-      elif (method == taskqueue_service_pb.TaskQueueAddRequest.POST):
+      elif method == taskqueue_service_pb.TaskQueueAddRequest.POST:
         task['method'] = 'POST'
-      elif (method == taskqueue_service_pb.TaskQueueAddRequest.HEAD):
+      elif method == taskqueue_service_pb.TaskQueueAddRequest.HEAD:
         task['method'] = 'HEAD'
-      elif (method == taskqueue_service_pb.TaskQueueAddRequest.PUT):
+      elif method == taskqueue_service_pb.TaskQueueAddRequest.PUT:
         task['method'] = 'PUT'
-      elif (method == taskqueue_service_pb.TaskQueueAddRequest.DELETE):
+      elif method == taskqueue_service_pb.TaskQueueAddRequest.DELETE:
         task['method'] = 'DELETE'
+      else:
+        raise ValueError('Unexpected method: %d' % method)
 
       task['eta'] = _FormatEta(task_request.eta_usec())
       task['eta_delta'] = _EtaDelta(task_request.eta_usec())
@@ -236,7 +312,7 @@
       queue_name: the name of the queue to delete the task from.
       task_name: the name of the task to delete.
     """
-    tasks = self.taskqueues.get(queue_name, [])
+    tasks = self._taskqueues.get(queue_name, [])
     for task in tasks:
       if task.task_name() == task_name:
         tasks.remove(task)
@@ -248,4 +324,4 @@
     Args:
       queue_name: the name of the queue to remove tasks from.
     """
-    self.taskqueues[queue_name] = []
+    self._taskqueues[queue_name] = []
--- a/thirdparty/google_appengine/google/appengine/api/mail.py	Mon Sep 07 20:26:39 2009 +0200
+++ b/thirdparty/google_appengine/google/appengine/api/mail.py	Mon Sep 07 20:27:37 2009 +0200
@@ -25,10 +25,12 @@
 
 
 
+
+import email
 from email import MIMEBase
 from email import MIMEMultipart
 from email import MIMEText
-import types
+import logging
 
 from google.appengine.api import api_base_pb
 from google.appengine.api import apiproxy_stub_map
@@ -38,51 +40,53 @@
 from google.appengine.runtime import apiproxy_errors
 
 
+
 ERROR_MAP = {
-  mail_service_pb.MailServiceError.BAD_REQUEST:
-    BadRequestError,
+    mail_service_pb.MailServiceError.BAD_REQUEST:
+      BadRequestError,
 
-  mail_service_pb.MailServiceError.UNAUTHORIZED_SENDER:
-    InvalidSenderError,
+    mail_service_pb.MailServiceError.UNAUTHORIZED_SENDER:
+      InvalidSenderError,
 
-  mail_service_pb.MailServiceError.INVALID_ATTACHMENT_TYPE:
-    InvalidAttachmentTypeError,
+    mail_service_pb.MailServiceError.INVALID_ATTACHMENT_TYPE:
+      InvalidAttachmentTypeError,
 }
 
 
 EXTENSION_MIME_MAP = {
-  'asc': 'text/plain',
-  'bmp': 'image/x-ms-bmp',
-  'css': 'text/css',
-  'csv': 'text/csv',
-  'diff': 'text/plain',
-  'gif': 'image/gif',
-  'htm': 'text/html',
-  'html': 'text/html',
-  'ics': 'text/calendar',
-  'jpe': 'image/jpeg',
-  'jpeg': 'image/jpeg',
-  'jpg': 'image/jpeg',
-  'pdf': 'application/pdf',
-  'png': 'image/png',
-  'pot': 'text/plain',
-  'rss': 'text/rss+xml',
-  'text': 'text/plain',
-  'tif': 'image/tiff',
-  'tiff': 'image/tiff',
-  'txt': 'text/plain',
-  'vcf': 'text/directory',
-  'wbmp': 'image/vnd.wap.wbmp',
-}
+    'asc': 'text/plain',
+    'bmp': 'image/x-ms-bmp',
+    'css': 'text/css',
+    'csv': 'text/csv',
+    'diff': 'text/plain',
+    'gif': 'image/gif',
+    'htm': 'text/html',
+    'html': 'text/html',
+    'ics': 'text/calendar',
+    'jpe': 'image/jpeg',
+    'jpeg': 'image/jpeg',
+    'jpg': 'image/jpeg',
+    'pdf': 'application/pdf',
+    'png': 'image/png',
+    'pot': 'text/plain',
+    'rss': 'text/rss+xml',
+    'text': 'text/plain',
+    'tif': 'image/tiff',
+    'tiff': 'image/tiff',
+    'txt': 'text/plain',
+    'vcf': 'text/directory',
+    'wbmp': 'image/vnd.wap.wbmp',
+    }
 
 EXTENSION_WHITELIST = frozenset(EXTENSION_MIME_MAP.iterkeys())
 
 
 def invalid_email_reason(email_address, field):
-  """Determine reason why email is invalid
+  """Determine reason why email is invalid.
 
   Args:
     email_address: Email to check.
+    field: Field that is invalid.
 
   Returns:
     String indicating invalid email reason if there is one,
@@ -93,7 +97,7 @@
 
   if isinstance(email_address, users.User):
     email_address = email_address.email()
-  if not isinstance(email_address, types.StringTypes):
+  if not isinstance(email_address, basestring):
     return 'Invalid email address type for %s.' % field
   stripped_address = email_address.strip()
   if not stripped_address:
@@ -118,10 +122,11 @@
 
 
 def check_email_valid(email_address, field):
-  """Check that email is valid
+  """Check that email is valid.
 
   Args:
     email_address: Email to check.
+    field: Field to check.
 
   Raises:
     InvalidEmailError if email_address is invalid.
@@ -165,7 +170,7 @@
     Single tuple with email in it if only one email string provided,
     else returns emails as is.
   """
-  if isinstance(emails, types.StringTypes):
+  if isinstance(emails, basestring):
     return emails,
   return emails
 
@@ -183,11 +188,29 @@
     Single tuple with attachment tuple in it if only one attachment provided,
     else returns attachments as is.
   """
-  if len(attachments) == 2 and isinstance(attachments[0], types.StringTypes):
+  if len(attachments) == 2 and isinstance(attachments[0], basestring):
     return attachments,
   return attachments
 
 
+def _parse_mime_message(mime_message):
+  """Helper function converts a mime_message into email.Message.Message.
+
+  Args:
+    mime_message: MIME Message, string or file containing mime message.
+
+  Returns:
+    Instance of email.Message.Message.  Will return mime_message if already
+    an instance.
+  """
+  if isinstance(mime_message, email.Message.Message):
+    return mime_message
+  elif isinstance(mime_message, basestring):
+    return email.message_from_string(mime_message)
+  else:
+    return email.message_from_file(mime_message)
+
+
 def send_mail(sender,
               to,
               subject,
@@ -285,7 +308,7 @@
   to a list of comma separated email addresses.
 
   Args:
-    message: Message PB to convert to MIMEMultitype.
+    protocol_message: Message PB to convert to MIMEMultitype.
 
   Returns:
     MIMEMultitype representing the provided MailMessage.
@@ -334,7 +357,7 @@
 
 
 def _to_str(value):
-  """Helper function to make sure unicode values converted to utf-8
+  """Helper function to make sure unicode values converted to utf-8.
 
   Args:
     value: str or unicode to convert to utf-8.
@@ -346,6 +369,129 @@
     return value.encode('utf-8')
   return value
 
+
+class EncodedPayload(object):
+  """Wrapper for a payload that contains encoding information.
+
+  When an email is received, it is usually encoded using a certain
+  character set, and then possibly further encoded using a transfer
+  encoding in that character set.  Most of the times, it is possible
+  to decode the encoded payload as is, however, in the case where it
+  is not, the encoded payload and the original encoding information
+  must be preserved.
+
+  Attributes:
+    payload: The original encoded payload.
+    charset: The character set of the encoded payload.  None means use
+      default character set.
+    encoding: The transfer encoding of the encoded payload.  None means
+      content not encoded.
+  """
+
+  def __init__(self, payload, charset=None, encoding=None):
+    """Constructor.
+
+    Args:
+      payload: Maps to attribute of the same name.
+      charset: Maps to attribute of the same name.
+      encoding: Maps to attribute of the same name.
+    """
+    self.payload = payload
+    self.charset = charset
+    self.encoding = encoding
+
+  def decode(self):
+    """Attempt to decode the encoded data.
+
+    Attempt to use Python's codec library to decode the payload.  All
+    exceptions are passed back to the caller.
+
+    Returns:
+      Binary or unicode version of payload content.
+    """
+    payload = self.payload
+
+    if self.encoding and self.encoding.lower() != '7bit':
+      try:
+        payload = payload.decode(self.encoding).lower()
+      except LookupError:
+        raise UnknownEncodingError('Unknown decoding %s.' % self.encoding)
+      except (Exception, Error), e:
+        raise PayloadEncodingError('Could not decode payload: %s' % e)
+
+    if self.charset and str(self.charset).lower() != '7bit':
+      try:
+        payload = payload.decode(str(self.charset)).lower()
+      except LookupError:
+        raise UnknownCharsetError('Unknown charset %s.' % self.charset)
+      except (Exception, Error), e:
+        raise PayloadEncodingError('Could read characters: %s' % e)
+
+    return payload
+
+  def __eq__(self, other):
+    """Equality operator.
+
+    Args:
+      other: The other EncodedPayload object to compare with.  Comparison
+        with other object types are not implemented.
+
+    Returns:
+      True if payload and encodings are equal, else False.
+    """
+    if isinstance(other, EncodedPayload):
+      return (self.payload == other.payload and
+              self.charset == other.charset and
+              self.encoding == other.encoding)
+    else:
+      return NotImplemented
+
+  def copy_to(self, mime_message):
+    """Copy contents to MIME message payload.
+
+    If no content transfer encoding is specified, and the character set does
+    not equal the over-all message encoding, the payload will be base64
+    encoded.
+
+    Args:
+      mime_message: Message instance to receive new payload.
+    """
+    if self.encoding:
+      mime_message['content-transfer-encoding'] = self.encoding
+    mime_message.set_payload(self.payload, self.charset)
+
+  def to_mime_message(self):
+    """Convert to MIME message.
+
+    Returns:
+      MIME message instance of payload.
+    """
+    mime_message = email.Message.Message()
+    self.copy_to(mime_message)
+    return mime_message
+
+  def __str__(self):
+    """String representation of encoded message.
+
+    Returns:
+      MIME encoded representation of encoded payload as an independent message.
+    """
+    return str(self.to_mime_message())
+
+  def __repr__(self):
+    """Basic representation of encoded payload.
+
+    Returns:
+      Payload itself is represented by its hash value.
+    """
+    result = '<EncodedPayload payload=#%d' % hash(self.payload)
+    if self.charset:
+      result += ' charset=%s' % self.charset
+    if self.encoding:
+      result += ' encoding=%s' % self.encoding
+    return result + '>'
+
+
 class _EmailMessageBase(object):
   """Base class for email API service objects.
 
@@ -354,25 +500,39 @@
   """
 
   PROPERTIES = set([
-    'sender',
-    'reply_to',
-    'subject',
-    'body',
-    'html',
-    'attachments',
+      'sender',
+      'reply_to',
+      'subject',
+      'body',
+      'html',
+      'attachments',
   ])
 
-  def __init__(self, **kw):
+  PROPERTIES.update(('to', 'cc', 'bcc'))
+
+  def __init__(self, mime_message=None, **kw):
     """Initialize Email message.
 
     Creates new MailMessage protocol buffer and initializes it with any
     keyword arguments.
 
     Args:
+      mime_message: MIME message to initialize from.  If instance of
+        email.Message.Message will take ownership as original message.
       kw: List of keyword properties as defined by PROPERTIES.
     """
+    if mime_message:
+      mime_message = _parse_mime_message(mime_message)
+      self.update_from_mime_message(mime_message)
+      self.__original = mime_message
+
     self.initialize(**kw)
 
+  @property
+  def original(self):
+    """Get original MIME message from which values were set."""
+    return self.__original
+
   def initialize(self, **kw):
     """Keyword initialization.
 
@@ -398,6 +558,7 @@
       - Subject must be set.
       - A recipient must be specified.
       - Must contain a body.
+      - All bodies and attachments must decode properly.
 
     This check does not include determining if the sender is actually
     authorized to send email for the application.
@@ -410,17 +571,45 @@
         MissingSenderError:         No sender specified.
         MissingSubjectError:        Subject is not specified.
         MissingBodyError:           No body specified.
+        PayloadEncodingError:       Payload is not properly encoded.
+        UnknownEncodingError:       Payload has unknown encoding.
+        UnknownCharsetError:        Payload has unknown character set.
     """
     if not hasattr(self, 'sender'):
       raise MissingSenderError()
     if not hasattr(self, 'subject'):
       raise MissingSubjectError()
-    if not hasattr(self, 'body') and not hasattr(self, 'html'):
+
+    found_body = False
+
+    try:
+      body = self.body
+    except AttributeError:
+      pass
+    else:
+      if isinstance(body, EncodedPayload):
+        body.decode()
+      found_body = True
+
+    try:
+      html = self.html
+    except AttributeError:
+      pass
+    else:
+      if isinstance(html, EncodedPayload):
+        html.decode()
+      found_body = True
+
+    if not found_body:
       raise MissingBodyError()
+
     if hasattr(self, 'attachments'):
       for file_name, data in _attachment_sequence(self.attachments):
         _GetMimeType(file_name)
 
+        if isinstance(data, EncodedPayload):
+          data.decode()
+
   def CheckInitialized(self):
     self.check_initialized()
 
@@ -448,6 +637,10 @@
 
     Returns:
       MailMessage protocol version of mail message.
+
+    Raises:
+      Passes through decoding errors that occur when using when decoding
+      EncodedPayload objects.
     """
     self.check_initialized()
     message = mail_service_pb.MailMessage()
@@ -456,13 +649,22 @@
     if hasattr(self, 'reply_to'):
       message.set_replyto(_to_str(self.reply_to))
     message.set_subject(_to_str(self.subject))
+
     if hasattr(self, 'body'):
-      message.set_textbody(_to_str(self.body))
+      body = self.body
+      if isinstance(body, EncodedPayload):
+        body = body.decode()
+      message.set_textbody(_to_str(body))
     if hasattr(self, 'html'):
-      message.set_htmlbody(_to_str(self.html))
+      html = self.html
+      if isinstance(html, EncodedPayload):
+        html = html.decode()
+      message.set_htmlbody(_to_str(html))
 
     if hasattr(self, 'attachments'):
       for file_name, data in _attachment_sequence(self.attachments):
+        if isinstance(data, EncodedPayload):
+          data = data.decode()
         attachment = message.add_attachment()
         attachment.set_filename(_to_str(file_name))
         attachment.set_data(_to_str(data))
@@ -485,7 +687,7 @@
       MissingSenderError:         No sender specified.
       MissingSubjectError:        Subject is not specified.
       MissingBodyError:           No body specified.
-  """
+    """
     return mail_message_to_mime_message(self.ToProto())
 
   def ToMIMEMessage(self):
@@ -517,8 +719,8 @@
 
   def _check_attachment(self, attachment):
     file_name, data = attachment
-    if not (isinstance(file_name, types.StringTypes) or
-            isinstance(data, types.StringTypes)):
+    if not (isinstance(file_name, basestring) or
+            isinstance(data, basestring)):
       raise TypeError()
 
   def _check_attachments(self, attachments):
@@ -534,7 +736,7 @@
     Raises:
       TypeError if values are not string type.
     """
-    if len(attachments) == 2 and isinstance(attachments[0], types.StringTypes):
+    if len(attachments) == 2 and isinstance(attachments[0], basestring):
       self._check_attachment(attachments)
     else:
       for attachment in attachments:
@@ -548,21 +750,134 @@
     Args:
       attr: Attribute to access.
       value: New value for field.
+
+    Raises:
+      ValueError: If provided with an empty field.
+      AttributeError: If not an allowed assignment field.
     """
-    if attr in ['sender', 'reply_to']:
-      check_email_valid(value, attr)
+    if not attr.startswith('_EmailMessageBase'):
+      if attr in ['sender', 'reply_to']:
+        check_email_valid(value, attr)
 
-    if not value:
-      raise ValueError('May not set empty value for \'%s\'' % attr)
+      if not value:
+        raise ValueError('May not set empty value for \'%s\'' % attr)
 
-    if attr not in self.PROPERTIES:
-      raise AttributeError('\'EmailMessage\' has no attribute \'%s\'' % attr)
+      if attr not in self.PROPERTIES:
+        raise AttributeError('\'EmailMessage\' has no attribute \'%s\'' % attr)
 
-    if attr == 'attachments':
-      self._check_attachments(value)
+      if attr == 'attachments':
+        self._check_attachments(value)
 
     super(_EmailMessageBase, self).__setattr__(attr, value)
 
+  def _add_body(self, content_type, payload):
+    """Add body to email from payload.
+
+    Will overwrite any existing default plain or html body.
+
+    Args:
+      content_type: Content-type of body.
+      payload: Payload to store body as.
+    """
+    if content_type == 'text/plain':
+      self.body = payload
+    elif content_type == 'text/html':
+      self.html = payload
+
+  def _update_payload(self, mime_message):
+    """Update payload of mail message from mime_message.
+
+    This function works recursively when it receives a multipart body.
+    If it receives a non-multi mime object, it will determine whether or
+    not it is an attachment by whether it has a filename or not.  Attachments
+    and bodies are then wrapped in EncodedPayload with the correct charsets and
+    encodings.
+
+    Args:
+      mime_message: A Message MIME email object.
+    """
+    payload = mime_message.get_payload()
+
+    if payload:
+      if mime_message.get_content_maintype() == 'multipart':
+        for alternative in payload:
+          self._update_payload(alternative)
+      else:
+        filename = mime_message.get_param('filename',
+                                          header='content-disposition')
+        if not filename:
+          filename = mime_message.get_param('name')
+
+        payload = EncodedPayload(payload,
+                                 mime_message.get_charset(),
+                                 mime_message['content-transfer-encoding'])
+
+        if filename:
+          try:
+            attachments = self.attachments
+          except AttributeError:
+            self.attachments = (filename, payload)
+          else:
+            if isinstance(attachments[0], basestring):
+              self.attachments = [attachments]
+              attachments = self.attachments
+            attachments.append((filename, payload))
+        else:
+          self._add_body(mime_message.get_content_type(), payload)
+
+  def update_from_mime_message(self, mime_message):
+    """Copy information from a mime message.
+
+    Set information of instance to values of mime message.  This method
+    will only copy values that it finds.  Any missing values will not
+    be copied, nor will they overwrite old values with blank values.
+
+    This object is not guaranteed to be initialized after this call.
+
+    Args:
+      mime_message: email.Message instance to copy information from.
+
+    Returns:
+      MIME Message instance of mime_message argument.
+    """
+    mime_message = _parse_mime_message(mime_message)
+
+    sender = mime_message['from']
+    if sender:
+      self.sender = sender
+
+    reply_to = mime_message['reply-to']
+    if reply_to:
+      self.reply_to = reply_to
+
+    subject = mime_message['subject']
+    if subject:
+      self.subject = subject
+
+    self._update_payload(mime_message)
+
+  def bodies(self, content_type=None):
+    """Iterate over all bodies.
+
+    Yields:
+      Tuple (content_type, payload) for html and body in that order.
+    """
+    if (not content_type or
+        content_type == 'text' or
+        content_type == 'text/html'):
+      try:
+        yield 'text/html', self.html
+      except AttributeError:
+        pass
+
+    if (not content_type or
+        content_type == 'text' or
+        content_type == 'text/plain'):
+      try:
+        yield 'text/plain', self.body
+      except AttributeError:
+        pass
+
 
 class EmailMessage(_EmailMessageBase):
   """Main interface to email API service.
@@ -592,8 +907,7 @@
   """
 
   _API_CALL = 'Send'
-  PROPERTIES = _EmailMessageBase.PROPERTIES
-  PROPERTIES.update(('to', 'cc', 'bcc'))
+  PROPERTIES = set(_EmailMessageBase.PROPERTIES)
 
   def check_initialized(self):
     """Provide additional checks to ensure recipients have been specified.
@@ -629,13 +943,46 @@
   def __setattr__(self, attr, value):
     """Provides additional checks on recipient fields."""
     if attr in ['to', 'cc', 'bcc']:
-      if isinstance(value, types.StringTypes):
+      if isinstance(value, basestring):
         check_email_valid(value, attr)
       else:
-        _email_check_and_list(value, attr)
+        for address in value:
+          check_email_valid(address, attr)
 
     super(EmailMessage, self).__setattr__(attr, value)
 
+  def update_from_mime_message(self, mime_message):
+    """Copy information from a mime message.
+
+    Update fields for recipients.
+
+    Args:
+      mime_message: email.Message instance to copy information from.
+    """
+    mime_message = _parse_mime_message(mime_message)
+    super(EmailMessage, self).update_from_mime_message(mime_message)
+
+    to = mime_message.get_all('to')
+    if to:
+      if len(to) == 1:
+        self.to = to[0]
+      else:
+        self.to = to
+
+    cc = mime_message.get_all('cc')
+    if cc:
+      if len(cc) == 1:
+        self.cc = cc[0]
+      else:
+        self.cc = cc
+
+    bcc = mime_message.get_all('bcc')
+    if bcc:
+      if len(bcc) == 1:
+        self.bcc = bcc[0]
+      else:
+        self.bcc = bcc
+
 
 class AdminEmailMessage(_EmailMessageBase):
   """Interface to sending email messages to all admins via the amil API.
@@ -667,3 +1014,114 @@
   """
 
   _API_CALL = 'SendToAdmins'
+  __UNUSED_PROPERTIES = set(('to', 'cc', 'bcc'))
+
+  def __setattr__(self, attr, value):
+    if attr in self.__UNUSED_PROPERTIES:
+      logging.warning('\'%s\' is not a valid property to set '
+                      'for AdminEmailMessage.  It is unused.', attr)
+    super(AdminEmailMessage, self).__setattr__(attr, value)
+
+
+class InboundEmailMessage(EmailMessage):
+  """Parsed email object as received from external source.
+
+  Has a date field and can store any number of additional bodies.  These
+  additional attributes make the email more flexible as required for
+  incoming mail, where the developer has less control over the content.
+
+  Example Usage:
+
+    # Read mail message from CGI input.
+    message = InboundEmailMessage(sys.stdin.read())
+    logging.info('Received email message from %s at %s',
+                 message.sender,
+                 message.date)
+    enriched_body = list(message.bodies('text/enriched'))[0]
+    ... Do something with body ...
+  """
+
+  __HEADER_PROPERTIES = {'date': 'date',
+                         'message_id': 'message-id',
+                        }
+
+  PROPERTIES = frozenset(_EmailMessageBase.PROPERTIES |
+                         set(('alternate_bodies',)) |
+                         set(__HEADER_PROPERTIES.iterkeys()))
+
+  def update_from_mime_message(self, mime_message):
+    """Update values from MIME message.
+
+    Copies over date values.
+
+    Args:
+      mime_message: email.Message instance to copy information from.
+    """
+    mime_message = _parse_mime_message(mime_message)
+    super(InboundEmailMessage, self).update_from_mime_message(mime_message)
+
+    for property, header in InboundEmailMessage.__HEADER_PROPERTIES.iteritems():
+      value = mime_message[header]
+      if value:
+        setattr(self, property, value)
+
+  def _add_body(self, content_type, payload):
+    """Add body to inbound message.
+
+    Method is overridden to handle incoming messages that have more than one
+    plain or html bodies or has any unidentified bodies.
+
+    This method will not overwrite existing html and body values.  This means
+    that when updating, the text and html bodies that are first in the MIME
+    document order are assigned to the body and html properties.
+
+    Args:
+      content_type: Content-type of additional body.
+      payload: Content of additional body.
+    """
+    if (content_type == 'text/plain' and not hasattr(self, 'body') or
+        content_type == 'text/html' and not hasattr(self, 'html')):
+      super(InboundEmailMessage, self)._add_body(content_type, payload)
+    else:
+      try:
+        alternate_bodies = self.alternate_bodies
+      except AttributeError:
+        alternate_bodies = self.alternate_bodies = [(content_type, payload)]
+      else:
+        alternate_bodies.append((content_type, payload))
+
+  def bodies(self, content_type=None):
+    """Iterate over all bodies.
+
+    Args:
+      content_type: Content type to filter on.  Allows selection of only
+        specific types of content.  Can be just the base type of the content
+        type.  For example:
+          content_type = 'text/html'  # Matches only HTML content.
+          content_type = 'text'       # Matches text of any kind.
+
+    Yields:
+      Tuple (content_type, payload) for all bodies of message, including body,
+      html and all alternate_bodies in that order.
+    """
+    main_bodies = super(InboundEmailMessage, self).bodies(content_type)
+    for payload_type, payload in main_bodies:
+      yield payload_type, payload
+
+    partial_type = bool(content_type and content_type.find('/') < 0)
+
+    try:
+      for payload_type, payload in self.alternate_bodies:
+        if content_type:
+          if partial_type:
+            match_type = payload_type.split('/')[0]
+          else:
+            match_type = payload_type
+          match = match_type == content_type
+        else:
+          match = True
+
+        if match:
+          yield payload_type, payload
+    except AttributeError:
+      pass
--- a/thirdparty/google_appengine/google/appengine/api/mail_errors.py	Mon Sep 07 20:26:39 2009 +0200
+++ b/thirdparty/google_appengine/google/appengine/api/mail_errors.py	Mon Sep 07 20:27:37 2009 +0200
@@ -44,3 +44,12 @@
 
 class MissingBodyError(Error):
   """No body specified in message."""
+
+class PayloadEncodingError(Error):
+  """Unknown payload encoding."""
+
+class UnknownEncodingError(PayloadEncodingError):
+  """Raised when encoding is not known."""
+
+class UnknownCharsetError(PayloadEncodingError):
+  """Raised when charset is not known."""
--- a/thirdparty/google_appengine/google/appengine/api/mail_service_pb.py	Mon Sep 07 20:26:39 2009 +0200
+++ b/thirdparty/google_appengine/google/appengine/api/mail_service_pb.py	Mon Sep 07 20:27:37 2009 +0200
@@ -81,13 +81,17 @@
     return res
 
 
-  _TEXT = (
-   "ErrorCode",
-  )
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
 
-  _TYPES = (
-   ProtocolBuffer.Encoder.NUMERIC,
-  )
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+  }, 0)
+
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+  }, 0, ProtocolBuffer.Encoder.MAX_TYPE)
 
   _STYLE = """"""
   _STYLE_CONTENT_TYPE = """"""
@@ -187,22 +191,24 @@
     if self.has_data_: res+=prefix+("Data: %s\n" % self.DebugFormatString(self.data_))
     return res
 
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
   kFileName = 1
   kData = 2
 
-  _TEXT = (
-   "ErrorCode",
-   "FileName",
-   "Data",
-  )
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    1: "FileName",
+    2: "Data",
+  }, 2)
 
-  _TYPES = (
-   ProtocolBuffer.Encoder.NUMERIC,
-   ProtocolBuffer.Encoder.STRING,
-
-   ProtocolBuffer.Encoder.STRING,
-
-  )
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    1: ProtocolBuffer.Encoder.STRING,
+    2: ProtocolBuffer.Encoder.STRING,
+  }, 2, ProtocolBuffer.Encoder.MAX_TYPE)
 
   _STYLE = """"""
   _STYLE_CONTENT_TYPE = """"""
@@ -532,6 +538,10 @@
       cnt+=1
     return res
 
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
   kSender = 1
   kReplyTo = 2
   kTo = 3
@@ -542,40 +552,31 @@
   kHtmlBody = 8
   kAttachment = 9
 
-  _TEXT = (
-   "ErrorCode",
-   "Sender",
-   "ReplyTo",
-   "To",
-   "Cc",
-   "Bcc",
-   "Subject",
-   "TextBody",
-   "HtmlBody",
-   "Attachment",
-  )
-
-  _TYPES = (
-   ProtocolBuffer.Encoder.NUMERIC,
-   ProtocolBuffer.Encoder.STRING,
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    1: "Sender",
+    2: "ReplyTo",
+    3: "To",
+    4: "Cc",
+    5: "Bcc",
+    6: "Subject",
+    7: "TextBody",
+    8: "HtmlBody",
+    9: "Attachment",
+  }, 9)
 
-   ProtocolBuffer.Encoder.STRING,
-
-   ProtocolBuffer.Encoder.STRING,
-
-   ProtocolBuffer.Encoder.STRING,
-
-   ProtocolBuffer.Encoder.STRING,
-
-   ProtocolBuffer.Encoder.STRING,
-
-   ProtocolBuffer.Encoder.STRING,
-
-   ProtocolBuffer.Encoder.STRING,
-
-   ProtocolBuffer.Encoder.STRING,
-
-  )
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    1: ProtocolBuffer.Encoder.STRING,
+    2: ProtocolBuffer.Encoder.STRING,
+    3: ProtocolBuffer.Encoder.STRING,
+    4: ProtocolBuffer.Encoder.STRING,
+    5: ProtocolBuffer.Encoder.STRING,
+    6: ProtocolBuffer.Encoder.STRING,
+    7: ProtocolBuffer.Encoder.STRING,
+    8: ProtocolBuffer.Encoder.STRING,
+    9: ProtocolBuffer.Encoder.STRING,
+  }, 9, ProtocolBuffer.Encoder.MAX_TYPE)
 
   _STYLE = """"""
   _STYLE_CONTENT_TYPE = """"""
--- a/thirdparty/google_appengine/google/appengine/api/memcache/__init__.py	Mon Sep 07 20:26:39 2009 +0200
+++ b/thirdparty/google_appengine/google/appengine/api/memcache/__init__.py	Mon Sep 07 20:27:37 2009 +0200
@@ -346,7 +346,14 @@
       return None
 
     if not response.has_stats():
-      return None
+      return {
+        STAT_HITS: 0,
+        STAT_MISSES: 0,
+        STAT_BYTE_HITS: 0,
+        STAT_ITEMS: 0,
+        STAT_BYTES: 0,
+        STAT_OLDEST_ITEM_AGES: 0,
+      }
 
     stats = response.stats()
     return {
@@ -770,15 +777,16 @@
                                        time=time, key_prefix=key_prefix,
                                        namespace=namespace)
 
-  def incr(self, key, delta=1, namespace=None):
+  def incr(self, key, delta=1, namespace=None, initial_value=None):
     """Atomically increments a key's value.
 
     Internally, the value is a unsigned 64-bit integer.  Memcache
     doesn't check 64-bit overflows.  The value, if too large, will
     wrap around.
 
-    The key must already exist in the cache to be incremented.  To
-    initialize a counter, set() it to the initial value, as an
+    Unless an initial_value is specified, the key must already exist
+    in the cache to be incremented.  To initialize a counter, either
+    specify initial_value or set() it to the initial value, as an
     ASCII decimal integer.  Future get()s of the key, post-increment,
     will still be an ASCII decimal value.
 
@@ -788,6 +796,9 @@
         defaulting to 1.
       namespace: a string specifying an optional namespace to use in
         the request.
+      initial_value: initial value to put in the cache, if it doesn't
+        already exist.  The default value, None, will not create a cache
+        entry if it doesn't already exist.
 
     Returns:
       New long integer value, or None if key was not in the cache, could not
@@ -798,9 +809,10 @@
       ValueError: If number is negative.
       TypeError: If delta isn't an int or long.
     """
-    return self._incrdecr(key, False, delta, namespace=namespace)
+    return self._incrdecr(key, False, delta, namespace=namespace,
+                          initial_value=initial_value)
 
-  def decr(self, key, delta=1, namespace=None):
+  def decr(self, key, delta=1, namespace=None, initial_value=None):
     """Atomically decrements a key's value.
 
     Internally, the value is a unsigned 64-bit integer.  Memcache
@@ -815,6 +827,9 @@
         defaulting to 1.
       namespace: a string specifying an optional namespace to use in
         the request.
+      initial_value: initial value to put in the cache, if it doesn't
+        already exist.  The default value, None, will not create a cache
+        entry if it doesn't already exist.
 
     Returns:
       New long integer value, or None if key wasn't in cache and couldn't
@@ -824,9 +839,11 @@
       ValueError: If number is negative.
       TypeError: If delta isn't an int or long.
     """
-    return self._incrdecr(key, True, delta, namespace=namespace)
+    return self._incrdecr(key, True, delta, namespace=namespace,
+                          initial_value=initial_value)
 
-  def _incrdecr(self, key, is_negative, delta, namespace=None):
+  def _incrdecr(self, key, is_negative, delta, namespace=None,
+                initial_value=None):
     """Increment or decrement a key by a provided delta.
 
     Args:
@@ -836,6 +853,9 @@
         or decrement by.
       namespace: a string specifying an optional namespace to use in
         the request.
+      initial_value: initial value to put in the cache, if it doesn't
+        already exist.  The default value, None, will not create a cache
+        entry if it doesn't already exist.
 
     Returns:
       New long integer value, or None on cache miss or network/RPC/server
@@ -859,6 +879,8 @@
       request.set_direction(MemcacheIncrementRequest.DECREMENT)
     else:
       request.set_direction(MemcacheIncrementRequest.INCREMENT)
+    if initial_value is not None:
+      request.set_initial_value(long(initial_value))
 
     try:
       self._make_sync_call('memcache', 'Increment', request, response)
--- a/thirdparty/google_appengine/google/appengine/api/memcache/memcache_service_pb.py	Mon Sep 07 20:26:39 2009 +0200
+++ b/thirdparty/google_appengine/google/appengine/api/memcache/memcache_service_pb.py	Mon Sep 07 20:27:37 2009 +0200
@@ -22,7 +22,6 @@
 __pychecker__ = """maxreturns=0 maxbranches=0 no-callinit
                    unusednames=printElemNumber,debug_strs no-special"""
 
-from google.appengine.api.api_base_pb import *
 class MemcacheServiceError(ProtocolBuffer.ProtocolMessage):
 
   OK           =    0
@@ -75,13 +74,17 @@
     return res
 
 
-  _TEXT = (
-   "ErrorCode",
-  )
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
 
-  _TYPES = (
-   ProtocolBuffer.Encoder.NUMERIC,
-  )
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+  }, 0)
+
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+  }, 0, ProtocolBuffer.Encoder.MAX_TYPE)
 
   _STYLE = """"""
   _STYLE_CONTENT_TYPE = """"""
@@ -183,22 +186,24 @@
     if self.has_name_space_: res+=prefix+("name_space: %s\n" % self.DebugFormatString(self.name_space_))
     return res
 
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
   kkey = 1
   kname_space = 2
 
-  _TEXT = (
-   "ErrorCode",
-   "key",
-   "name_space",
-  )
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    1: "key",
+    2: "name_space",
+  }, 2)
 
-  _TYPES = (
-   ProtocolBuffer.Encoder.NUMERIC,
-   ProtocolBuffer.Encoder.STRING,
-
-   ProtocolBuffer.Encoder.STRING,
-
-  )
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    1: ProtocolBuffer.Encoder.STRING,
+    2: ProtocolBuffer.Encoder.STRING,
+  }, 2, ProtocolBuffer.Encoder.MAX_TYPE)
 
   _STYLE = """"""
   _STYLE_CONTENT_TYPE = """"""
@@ -403,30 +408,30 @@
       cnt+=1
     return res
 
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
   kItemGroup = 1
   kItemkey = 2
   kItemvalue = 3
   kItemflags = 4
 
-  _TEXT = (
-   "ErrorCode",
-   "Item",
-   "key",
-   "value",
-   "flags",
-  )
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    1: "Item",
+    2: "key",
+    3: "value",
+    4: "flags",
+  }, 4)
 
-  _TYPES = (
-   ProtocolBuffer.Encoder.NUMERIC,
-   ProtocolBuffer.Encoder.STARTGROUP,
-
-   ProtocolBuffer.Encoder.STRING,
-
-   ProtocolBuffer.Encoder.STRING,
-
-   ProtocolBuffer.Encoder.FLOAT,
-
-  )
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    1: ProtocolBuffer.Encoder.STARTGROUP,
+    2: ProtocolBuffer.Encoder.STRING,
+    3: ProtocolBuffer.Encoder.STRING,
+    4: ProtocolBuffer.Encoder.FLOAT,
+  }, 4, ProtocolBuffer.Encoder.MAX_TYPE)
 
   _STYLE = """"""
   _STYLE_CONTENT_TYPE = """"""
@@ -726,6 +731,10 @@
     if self.has_name_space_: res+=prefix+("name_space: %s\n" % self.DebugFormatString(self.name_space_))
     return res
 
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
   kItemGroup = 1
   kItemkey = 2
   kItemvalue = 3
@@ -734,34 +743,27 @@
   kItemexpiration_time = 6
   kname_space = 7
 
-  _TEXT = (
-   "ErrorCode",
-   "Item",
-   "key",
-   "value",
-   "flags",
-   "set_policy",
-   "expiration_time",
-   "name_space",
-  )
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    1: "Item",
+    2: "key",
+    3: "value",
+    4: "flags",
+    5: "set_policy",
+    6: "expiration_time",
+    7: "name_space",
+  }, 7)
 
-  _TYPES = (
-   ProtocolBuffer.Encoder.NUMERIC,
-   ProtocolBuffer.Encoder.STARTGROUP,
-
-   ProtocolBuffer.Encoder.STRING,
-
-   ProtocolBuffer.Encoder.STRING,
-
-   ProtocolBuffer.Encoder.FLOAT,
-
-   ProtocolBuffer.Encoder.NUMERIC,
-
-   ProtocolBuffer.Encoder.FLOAT,
-
-   ProtocolBuffer.Encoder.STRING,
-
-  )
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    1: ProtocolBuffer.Encoder.STARTGROUP,
+    2: ProtocolBuffer.Encoder.STRING,
+    3: ProtocolBuffer.Encoder.STRING,
+    4: ProtocolBuffer.Encoder.FLOAT,
+    5: ProtocolBuffer.Encoder.NUMERIC,
+    6: ProtocolBuffer.Encoder.FLOAT,
+    7: ProtocolBuffer.Encoder.STRING,
+  }, 7, ProtocolBuffer.Encoder.MAX_TYPE)
 
   _STYLE = """"""
   _STYLE_CONTENT_TYPE = """"""
@@ -850,18 +852,21 @@
       cnt+=1
     return res
 
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
   kset_status = 1
 
-  _TEXT = (
-   "ErrorCode",
-   "set_status",
-  )
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    1: "set_status",
+  }, 1)
 
-  _TYPES = (
-   ProtocolBuffer.Encoder.NUMERIC,
-   ProtocolBuffer.Encoder.NUMERIC,
-
-  )
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    1: ProtocolBuffer.Encoder.NUMERIC,
+  }, 1, ProtocolBuffer.Encoder.MAX_TYPE)
 
   _STYLE = """"""
   _STYLE_CONTENT_TYPE = """"""
@@ -1063,30 +1068,30 @@
     if self.has_name_space_: res+=prefix+("name_space: %s\n" % self.DebugFormatString(self.name_space_))
     return res
 
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
   kItemGroup = 1
   kItemkey = 2
   kItemdelete_time = 3
   kname_space = 4
 
-  _TEXT = (
-   "ErrorCode",
-   "Item",
-   "key",
-   "delete_time",
-   "name_space",
-  )
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    1: "Item",
+    2: "key",
+    3: "delete_time",
+    4: "name_space",
+  }, 4)
 
-  _TYPES = (
-   ProtocolBuffer.Encoder.NUMERIC,
-   ProtocolBuffer.Encoder.STARTGROUP,
-
-   ProtocolBuffer.Encoder.STRING,
-
-   ProtocolBuffer.Encoder.FLOAT,
-
-   ProtocolBuffer.Encoder.STRING,
-
-  )
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    1: ProtocolBuffer.Encoder.STARTGROUP,
+    2: ProtocolBuffer.Encoder.STRING,
+    3: ProtocolBuffer.Encoder.FLOAT,
+    4: ProtocolBuffer.Encoder.STRING,
+  }, 4, ProtocolBuffer.Encoder.MAX_TYPE)
 
   _STYLE = """"""
   _STYLE_CONTENT_TYPE = """"""
@@ -1173,18 +1178,21 @@
       cnt+=1
     return res
 
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
   kdelete_status = 1
 
-  _TEXT = (
-   "ErrorCode",
-   "delete_status",
-  )
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    1: "delete_status",
+  }, 1)
 
-  _TYPES = (
-   ProtocolBuffer.Encoder.NUMERIC,
-   ProtocolBuffer.Encoder.NUMERIC,
-
-  )
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    1: ProtocolBuffer.Encoder.NUMERIC,
+  }, 1, ProtocolBuffer.Encoder.MAX_TYPE)
 
   _STYLE = """"""
   _STYLE_CONTENT_TYPE = """"""
@@ -1209,6 +1217,8 @@
   delta_ = 1
   has_direction_ = 0
   direction_ = 1
+  has_initial_value_ = 0
+  initial_value_ = 0
 
   def __init__(self, contents=None):
     if contents is not None: self.MergeFromString(contents)
@@ -1265,6 +1275,19 @@
 
   def has_direction(self): return self.has_direction_
 
+  def initial_value(self): return self.initial_value_
+
+  def set_initial_value(self, x):
+    self.has_initial_value_ = 1
+    self.initial_value_ = x
+
+  def clear_initial_value(self):
+    if self.has_initial_value_:
+      self.has_initial_value_ = 0
+      self.initial_value_ = 0
+
+  def has_initial_value(self): return self.has_initial_value_
+
 
   def MergeFrom(self, x):
     assert x is not self
@@ -1272,6 +1295,7 @@
     if (x.has_name_space()): self.set_name_space(x.name_space())
     if (x.has_delta()): self.set_delta(x.delta())
     if (x.has_direction()): self.set_direction(x.direction())
+    if (x.has_initial_value()): self.set_initial_value(x.initial_value())
 
   def Equals(self, x):
     if x is self: return 1
@@ -1283,6 +1307,8 @@
     if self.has_delta_ and self.delta_ != x.delta_: return 0
     if self.has_direction_ != x.has_direction_: return 0
     if self.has_direction_ and self.direction_ != x.direction_: return 0
+    if self.has_initial_value_ != x.has_initial_value_: return 0
+    if self.has_initial_value_ and self.initial_value_ != x.initial_value_: return 0
     return 1
 
   def IsInitialized(self, debug_strs=None):
@@ -1299,6 +1325,7 @@
     if (self.has_name_space_): n += 1 + self.lengthString(len(self.name_space_))
     if (self.has_delta_): n += 1 + self.lengthVarInt64(self.delta_)
     if (self.has_direction_): n += 1 + self.lengthVarInt64(self.direction_)
+    if (self.has_initial_value_): n += 1 + self.lengthVarInt64(self.initial_value_)
     return n + 1
 
   def Clear(self):
@@ -1306,6 +1333,7 @@
     self.clear_name_space()
     self.clear_delta()
     self.clear_direction()
+    self.clear_initial_value()
 
   def OutputUnchecked(self, out):
     out.putVarInt32(10)
@@ -1319,6 +1347,9 @@
     if (self.has_name_space_):
       out.putVarInt32(34)
       out.putPrefixedString(self.name_space_)
+    if (self.has_initial_value_):
+      out.putVarInt32(40)
+      out.putVarUint64(self.initial_value_)
 
   def TryMerge(self, d):
     while d.avail() > 0:
@@ -1335,6 +1366,9 @@
       if tt == 34:
         self.set_name_space(d.getPrefixedString())
         continue
+      if tt == 40:
+        self.set_initial_value(d.getVarUint64())
+        continue
       if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
       d.skipData(tt)
 
@@ -1345,32 +1379,36 @@
     if self.has_name_space_: res+=prefix+("name_space: %s\n" % self.DebugFormatString(self.name_space_))
     if self.has_delta_: res+=prefix+("delta: %s\n" % self.DebugFormatInt64(self.delta_))
     if self.has_direction_: res+=prefix+("direction: %s\n" % self.DebugFormatInt32(self.direction_))
+    if self.has_initial_value_: res+=prefix+("initial_value: %s\n" % self.DebugFormatInt64(self.initial_value_))
     return res
 
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
   kkey = 1
   kname_space = 4
   kdelta = 2
   kdirection = 3
-
-  _TEXT = (
-   "ErrorCode",
-   "key",
-   "delta",
-   "direction",
-   "name_space",
-  )
+  kinitial_value = 5
 
-  _TYPES = (
-   ProtocolBuffer.Encoder.NUMERIC,
-   ProtocolBuffer.Encoder.STRING,
-
-   ProtocolBuffer.Encoder.NUMERIC,
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    1: "key",
+    2: "delta",
+    3: "direction",
+    4: "name_space",
+    5: "initial_value",
+  }, 5)
 
-   ProtocolBuffer.Encoder.NUMERIC,
-
-   ProtocolBuffer.Encoder.STRING,
-
-  )
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    1: ProtocolBuffer.Encoder.STRING,
+    2: ProtocolBuffer.Encoder.NUMERIC,
+    3: ProtocolBuffer.Encoder.NUMERIC,
+    4: ProtocolBuffer.Encoder.STRING,
+    5: ProtocolBuffer.Encoder.NUMERIC,
+  }, 5, ProtocolBuffer.Encoder.MAX_TYPE)
 
   _STYLE = """"""
   _STYLE_CONTENT_TYPE = """"""
@@ -1437,18 +1475,21 @@
     if self.has_new_value_: res+=prefix+("new_value: %s\n" % self.DebugFormatInt64(self.new_value_))
     return res
 
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
   knew_value = 1
 
-  _TEXT = (
-   "ErrorCode",
-   "new_value",
-  )
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    1: "new_value",
+  }, 1)
 
-  _TYPES = (
-   ProtocolBuffer.Encoder.NUMERIC,
-   ProtocolBuffer.Encoder.NUMERIC,
-
-  )
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    1: ProtocolBuffer.Encoder.NUMERIC,
+  }, 1, ProtocolBuffer.Encoder.MAX_TYPE)
 
   _STYLE = """"""
   _STYLE_CONTENT_TYPE = """"""
@@ -1492,13 +1533,17 @@
     return res
 
 
-  _TEXT = (
-   "ErrorCode",
-  )
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
 
-  _TYPES = (
-   ProtocolBuffer.Encoder.NUMERIC,
-  )
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+  }, 0)
+
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+  }, 0, ProtocolBuffer.Encoder.MAX_TYPE)
 
   _STYLE = """"""
   _STYLE_CONTENT_TYPE = """"""
@@ -1542,13 +1587,17 @@
     return res
 
 
-  _TEXT = (
-   "ErrorCode",
-  )
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
 
-  _TYPES = (
-   ProtocolBuffer.Encoder.NUMERIC,
-  )
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+  }, 0)
+
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+  }, 0, ProtocolBuffer.Encoder.MAX_TYPE)
 
   _STYLE = """"""
   _STYLE_CONTENT_TYPE = """"""
@@ -1592,13 +1641,17 @@
     return res
 
 
-  _TEXT = (
-   "ErrorCode",
-  )
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
 
-  _TYPES = (
-   ProtocolBuffer.Encoder.NUMERIC,
-  )
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+  }, 0)
+
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+  }, 0, ProtocolBuffer.Encoder.MAX_TYPE)
 
   _STYLE = """"""
   _STYLE_CONTENT_TYPE = """"""
@@ -1817,6 +1870,10 @@
     if self.has_oldest_item_age_: res+=prefix+("oldest_item_age: %s\n" % self.DebugFormatFixed32(self.oldest_item_age_))
     return res
 
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
   khits = 1
   kmisses = 2
   kbyte_hits = 3
@@ -1824,31 +1881,25 @@
   kbytes = 5
   koldest_item_age = 6
 
-  _TEXT = (
-   "ErrorCode",
-   "hits",
-   "misses",
-   "byte_hits",
-   "items",
-   "bytes",
-   "oldest_item_age",
-  )
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    1: "hits",
+    2: "misses",
+    3: "byte_hits",
+    4: "items",
+    5: "bytes",
+    6: "oldest_item_age",
+  }, 6)
 
-  _TYPES = (
-   ProtocolBuffer.Encoder.NUMERIC,
-   ProtocolBuffer.Encoder.NUMERIC,
-
-   ProtocolBuffer.Encoder.NUMERIC,
-
-   ProtocolBuffer.Encoder.NUMERIC,
-
-   ProtocolBuffer.Encoder.NUMERIC,
-
-   ProtocolBuffer.Encoder.NUMERIC,
-
-   ProtocolBuffer.Encoder.FLOAT,
-
-  )
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    1: ProtocolBuffer.Encoder.NUMERIC,
+    2: ProtocolBuffer.Encoder.NUMERIC,
+    3: ProtocolBuffer.Encoder.NUMERIC,
+    4: ProtocolBuffer.Encoder.NUMERIC,
+    5: ProtocolBuffer.Encoder.NUMERIC,
+    6: ProtocolBuffer.Encoder.FLOAT,
+  }, 6, ProtocolBuffer.Encoder.MAX_TYPE)
 
   _STYLE = """"""
   _STYLE_CONTENT_TYPE = """"""
@@ -1929,18 +1980,21 @@
       res+=prefix+">\n"
     return res
 
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
   kstats = 1
 
-  _TEXT = (
-   "ErrorCode",
-   "stats",
-  )
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    1: "stats",
+  }, 1)
 
-  _TYPES = (
-   ProtocolBuffer.Encoder.NUMERIC,
-   ProtocolBuffer.Encoder.STRING,
-
-  )
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    1: ProtocolBuffer.Encoder.STRING,
+  }, 1, ProtocolBuffer.Encoder.MAX_TYPE)
 
   _STYLE = """"""
   _STYLE_CONTENT_TYPE = """"""
--- a/thirdparty/google_appengine/google/appengine/api/memcache/memcache_stub.py	Mon Sep 07 20:26:39 2009 +0200
+++ b/thirdparty/google_appengine/google/appengine/api/memcache/memcache_stub.py	Mon Sep 07 20:27:37 2009 +0200
@@ -229,7 +229,16 @@
     key = request.key()
     entry = self._GetKey(namespace, key)
     if entry is None:
-      return
+      if not request.has_initial_value():
+        return
+      if namespace not in self._the_cache:
+        self._the_cache[namespace] = {}
+      self._the_cache[namespace][key] = CacheEntry(str(request.initial_value()),
+                                                   expiration=0,
+                                                   flags=0,
+                                                   gettime=self._gettime)
+      entry = self._GetKey(namespace, key)
+      assert entry is not None
 
     try:
       old_value = long(entry.value)
--- a/thirdparty/google_appengine/google/appengine/api/namespace_manager/__init__.py	Mon Sep 07 20:26:39 2009 +0200
+++ b/thirdparty/google_appengine/google/appengine/api/namespace_manager/__init__.py	Mon Sep 07 20:27:37 2009 +0200
@@ -27,8 +27,8 @@
 import os
 
 ENV_DEFAULT_NAMESPACE = 'HTTP_X_APPENGINE_DEFAULT_NAMESPACE'
+ENV_CURRENT_NAMESPACE = '__INTERNAL_CURRENT_NAMESPACE'
 
-__default_namespace = None
 
 def set_request_namespace(namespace):
   """Set the default namespace to use for future calls, for this request only.
@@ -37,20 +37,28 @@
     namespace: A string naming the new namespace to use. The empty
       string specifies the root namespace for this app.
   """
-  global __default_namespace
-  __default_namespace = namespace
+  os.environ[ENV_CURRENT_NAMESPACE] = namespace
 
 
 def get_request_namespace():
-  """Get the name of the current default namespace. The empty string
-  indicates that the root namespace is the default."""
-  global __default_namespace
-  if __default_namespace is None:
+  """Get the name of the current default namespace.
+
+  The empty string indicates that the root namespace is the default.
+  """
+  return os.getenv(ENV_CURRENT_NAMESPACE, '')
+
+
+def _enable_request_namespace():
+  """Automatically default the namespace to the Google Apps domain.
+
+  Calling this function will automatically default the namespace to the
+  chosen Google Apps domain for the current request.
+  """
+  if ENV_CURRENT_NAMESPACE not in os.environ:
     if ENV_DEFAULT_NAMESPACE in os.environ:
-      __default_namespace = os.environ[ENV_DEFAULT_NAMESPACE]
+      os.environ[ENV_CURRENT_NAMESPACE] = os.environ[ENV_DEFAULT_NAMESPACE]
     else:
-      __default_namespace = ''
-  return __default_namespace
+      os.environ[ENV_CURRENT_NAMESPACE] = ''
 
 
 def _add_name_space(request, namespace=None):
--- a/thirdparty/google_appengine/google/appengine/api/queueinfo.py	Mon Sep 07 20:26:39 2009 +0200
+++ b/thirdparty/google_appengine/google/appengine/api/queueinfo.py	Mon Sep 07 20:27:37 2009 +0200
@@ -21,20 +21,22 @@
 for an application. Supports loading the records from queue.yaml.
 
 A queue has two required parameters and one optional one. The required
-parameters are 'name' (must be unique for an appid) and 'rate' (the
-rate at which jobs in the queue are run). There is an optional 'bucket_size'
-that will allow tokens to be 'saved up' and bucket_size. Rate and bucket_size rate are
-expressed as number/unit, with number being an int or a float, and unit being
-one of 's' (seconds), 'm' (minutes), 'h' (hours) or 'd' (days).
+parameters are 'name' (must be unique for an appid) and 'rate' (the rate
+at which jobs in the queue are run). There is an optional parameter
+'bucket_size' that will allow tokens to be 'saved up' (for more on the
+algorithm, see http://en.wikipedia.org/wiki/Token_Bucket). rate is expressed
+as number/unit, with number being an int or a float, and unit being one of
+'s' (seconds), 'm' (minutes), 'h' (hours) or 'd' (days). bucket_size is
+an integer.
 
-An example of the use of bucket_size rate: the free email quota is 2000/d, and the
-maximum you can send in a single minute is 11. So we can define a queue for
-sending email like this:
+An example of the use of bucket_size rate: the free email quota is 2000/d,
+and the maximum you can send in a single minute is 11. So we can define a
+queue for sending email like this:
 
 queue:
 - name: mail_queue
   rate: 2000/d
-  bucket_size: 10/m
+  bucket_size: 10
 
 If this queue had been idle for a while before some jobs were submitted to it,
 the first 10 jobs submitted would be run immediately, then subsequent ones
@@ -49,7 +51,7 @@
 from google.appengine.api import yaml_object
 
 _NAME_REGEX = r'^[A-Za-z0-9-]{0,499}$'
-_RATE_REGEX = r'^[0-9]+(\.[0-9]+)?/[smhd]'
+_RATE_REGEX = r'^(0|[0-9]+(\.[0-9]*)?/[smhd])'
 
 QUEUE = 'queue'
 
@@ -102,7 +104,7 @@
 
 
 def ParseRate(rate):
-  """Parses a rate string in the form number/unit.
+  """Parses a rate string in the form number/unit, or the literal 0.
 
   The unit is one of s (seconds), m (minutes), h (hours) or d (days).
 
@@ -115,6 +117,8 @@
   Raises:
     MalformedQueueConfiguration: if the rate is invalid
   """
+  if rate == "0":
+    return 0.0
   elements = rate.split('/')
   if len(elements) != 2:
     raise MalformedQueueConfiguration('Rate "%s" is invalid.' % rate)
--- a/thirdparty/google_appengine/google/appengine/api/quota.py	Mon Sep 07 20:26:39 2009 +0200
+++ b/thirdparty/google_appengine/google/appengine/api/quota.py	Mon Sep 07 20:27:37 2009 +0200
@@ -37,3 +37,35 @@
   if _apphosting_runtime___python__apiproxy:
     return _apphosting_runtime___python__apiproxy.get_request_cpu_usage()
   return 0
+
+def get_request_api_cpu_usage():
+  """Get the amount of CPU used so far by API calls during the current request.
+
+  Returns the number of megacycles used so far by API calls for the current
+  request. Does not include CPU used by code in the request itself.
+
+  Does nothing when used in the dev_appserver.
+  """
+
+  if _apphosting_runtime___python__apiproxy:
+    return _apphosting_runtime___python__apiproxy.get_request_api_cpu_usage()
+  return 0
+
+MCYCLES_PER_SECOND = 1200.0
+"""Megacycles to CPU seconds.  Convert by using a 1.2 GHz 64-bit x86 CPU."""
+
+def megacycles_to_cpu_seconds(mcycles):
+  """Convert an input value in megacycles to CPU-seconds.
+
+  Returns a float representing the CPU-seconds the input megacycle value
+  converts to.
+  """
+  return mcycles / MCYCLES_PER_SECOND
+
+def cpu_seconds_to_megacycles(cpu_secs):
+  """Convert an input value in CPU-seconds to megacycles.
+
+  Returns an integer representing the megacycles the input CPU-seconds value
+  converts to.
+  """
+  return int(cpu_secs * MCYCLES_PER_SECOND)
--- a/thirdparty/google_appengine/google/appengine/api/urlfetch_service_pb.py	Mon Sep 07 20:26:39 2009 +0200
+++ b/thirdparty/google_appengine/google/appengine/api/urlfetch_service_pb.py	Mon Sep 07 20:27:37 2009 +0200
@@ -22,7 +22,6 @@
 __pychecker__ = """maxreturns=0 maxbranches=0 no-callinit
                    unusednames=printElemNumber,debug_strs no-special"""
 
-from google.appengine.api.api_base_pb import *
 class URLFetchServiceError(ProtocolBuffer.ProtocolMessage):
 
   OK           =    0
@@ -83,13 +82,17 @@
     return res
 
 
-  _TEXT = (
-   "ErrorCode",
-  )
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
 
-  _TYPES = (
-   ProtocolBuffer.Encoder.NUMERIC,
-  )
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+  }, 0)
+
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+  }, 0, ProtocolBuffer.Encoder.MAX_TYPE)
 
   _STYLE = """"""
   _STYLE_CONTENT_TYPE = """"""
@@ -426,6 +429,10 @@
     if self.has_deadline_: res+=prefix+("Deadline: %s\n" % self.DebugFormat(self.deadline_))
     return res
 
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
   kMethod = 1
   kUrl = 2
   kHeaderGroup = 3
@@ -435,37 +442,29 @@
   kFollowRedirects = 7
   kDeadline = 8
 
-  _TEXT = (
-   "ErrorCode",
-   "Method",
-   "Url",
-   "Header",
-   "Key",
-   "Value",
-   "Payload",
-   "FollowRedirects",
-   "Deadline",
-  )
-
-  _TYPES = (
-   ProtocolBuffer.Encoder.NUMERIC,
-   ProtocolBuffer.Encoder.NUMERIC,
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    1: "Method",
+    2: "Url",
+    3: "Header",
+    4: "Key",
+    5: "Value",
+    6: "Payload",
+    7: "FollowRedirects",
+    8: "Deadline",
+  }, 8)
 
-   ProtocolBuffer.Encoder.STRING,
-
-   ProtocolBuffer.Encoder.STARTGROUP,
-
-   ProtocolBuffer.Encoder.STRING,
-
-   ProtocolBuffer.Encoder.STRING,
-
-   ProtocolBuffer.Encoder.STRING,
-
-   ProtocolBuffer.Encoder.NUMERIC,
-
-   ProtocolBuffer.Encoder.DOUBLE,
-
-  )
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    1: ProtocolBuffer.Encoder.NUMERIC,
+    2: ProtocolBuffer.Encoder.STRING,
+    3: ProtocolBuffer.Encoder.STARTGROUP,
+    4: ProtocolBuffer.Encoder.STRING,
+    5: ProtocolBuffer.Encoder.STRING,
+    6: ProtocolBuffer.Encoder.STRING,
+    7: ProtocolBuffer.Encoder.NUMERIC,
+    8: ProtocolBuffer.Encoder.DOUBLE,
+  }, 8, ProtocolBuffer.Encoder.MAX_TYPE)
 
   _STYLE = """"""
   _STYLE_CONTENT_TYPE = """"""
@@ -781,6 +780,10 @@
     if self.has_externalbytesreceived_: res+=prefix+("ExternalBytesReceived: %s\n" % self.DebugFormatInt64(self.externalbytesreceived_))
     return res
 
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
   kContent = 1
   kStatusCode = 2
   kHeaderGroup = 3
@@ -790,37 +793,29 @@
   kExternalBytesSent = 7
   kExternalBytesReceived = 8
 
-  _TEXT = (
-   "ErrorCode",
-   "Content",
-   "StatusCode",
-   "Header",
-   "Key",
-   "Value",
-   "ContentWasTruncated",
-   "ExternalBytesSent",
-   "ExternalBytesReceived",
-  )
-
-  _TYPES = (
-   ProtocolBuffer.Encoder.NUMERIC,
-   ProtocolBuffer.Encoder.STRING,
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    1: "Content",
+    2: "StatusCode",
+    3: "Header",
+    4: "Key",
+    5: "Value",
+    6: "ContentWasTruncated",
+    7: "ExternalBytesSent",
+    8: "ExternalBytesReceived",
+  }, 8)
 
-   ProtocolBuffer.Encoder.NUMERIC,
-
-   ProtocolBuffer.Encoder.STARTGROUP,
-
-   ProtocolBuffer.Encoder.STRING,
-
-   ProtocolBuffer.Encoder.STRING,
-
-   ProtocolBuffer.Encoder.NUMERIC,
-
-   ProtocolBuffer.Encoder.NUMERIC,
-
-   ProtocolBuffer.Encoder.NUMERIC,
-
-  )
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    1: ProtocolBuffer.Encoder.STRING,
+    2: ProtocolBuffer.Encoder.NUMERIC,
+    3: ProtocolBuffer.Encoder.STARTGROUP,
+    4: ProtocolBuffer.Encoder.STRING,
+    5: ProtocolBuffer.Encoder.STRING,
+    6: ProtocolBuffer.Encoder.NUMERIC,
+    7: ProtocolBuffer.Encoder.NUMERIC,
+    8: ProtocolBuffer.Encoder.NUMERIC,
+  }, 8, ProtocolBuffer.Encoder.MAX_TYPE)
 
   _STYLE = """"""
   _STYLE_CONTENT_TYPE = """"""
--- a/thirdparty/google_appengine/google/appengine/api/urlfetch_stub.py	Mon Sep 07 20:26:39 2009 +0200
+++ b/thirdparty/google_appengine/google/appengine/api/urlfetch_stub.py	Mon Sep 07 20:27:37 2009 +0200
@@ -55,7 +55,6 @@
 _UNTRUSTED_REQUEST_HEADERS = frozenset([
   'content-length',
   'host',
-  'referer',
   'vary',
   'via',
   'x-forwarded-for',
@@ -168,7 +167,6 @@
       adjusted_headers = {
           'User-Agent':
           'AppEngine-Google; (+http://code.google.com/appengine)',
-          'Referer': 'http://localhost/',
           'Host': host,
           'Accept-Encoding': 'gzip',
       }
@@ -212,7 +210,10 @@
           socket.setdefaulttimeout(deadline)
           connection.request(method, full_path, payload, adjusted_headers)
           http_response = connection.getresponse()
-          http_response_data = http_response.read()
+          if method == 'HEAD':
+            http_response_data = ''
+          else:
+            http_response_data = http_response.read()
         finally:
           socket.setdefaulttimeout(orig_timeout)
           connection.close()
@@ -239,7 +240,7 @@
               header_value == 'gzip'):
             continue
           if header_key.lower() == 'content-length':
-            header_value = len(response.content())
+            header_value = str(len(response.content()))
           header_proto = response.add_header()
           header_proto.set_key(header_key)
           header_proto.set_value(header_value)
--- a/thirdparty/google_appengine/google/appengine/api/user_service_pb.py	Mon Sep 07 20:26:39 2009 +0200
+++ b/thirdparty/google_appengine/google/appengine/api/user_service_pb.py	Mon Sep 07 20:27:37 2009 +0200
@@ -77,15 +77,415 @@
     return res
 
 
-  _TEXT = (
-   "ErrorCode",
-  )
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+  }, 0)
+
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+  }, 0, ProtocolBuffer.Encoder.MAX_TYPE)
+
+  _STYLE = """"""
+  _STYLE_CONTENT_TYPE = """"""
+class CreateLoginURLRequest(ProtocolBuffer.ProtocolMessage):
+  has_destination_url_ = 0
+  destination_url_ = ""
+  has_auth_domain_ = 0
+  auth_domain_ = ""
+
+  def __init__(self, contents=None):
+    if contents is not None: self.MergeFromString(contents)
+
+  def destination_url(self): return self.destination_url_
+
+  def set_destination_url(self, x):
+    self.has_destination_url_ = 1
+    self.destination_url_ = x
+
+  def clear_destination_url(self):
+    if self.has_destination_url_:
+      self.has_destination_url_ = 0
+      self.destination_url_ = ""
+
+  def has_destination_url(self): return self.has_destination_url_
+
+  def auth_domain(self): return self.auth_domain_
+
+  def set_auth_domain(self, x):
+    self.has_auth_domain_ = 1
+    self.auth_domain_ = x
+
+  def clear_auth_domain(self):
+    if self.has_auth_domain_:
+      self.has_auth_domain_ = 0
+      self.auth_domain_ = ""
+
+  def has_auth_domain(self): return self.has_auth_domain_
+
+
+  def MergeFrom(self, x):
+    assert x is not self
+    if (x.has_destination_url()): self.set_destination_url(x.destination_url())
+    if (x.has_auth_domain()): self.set_auth_domain(x.auth_domain())
+
+  def Equals(self, x):
+    if x is self: return 1
+    if self.has_destination_url_ != x.has_destination_url_: return 0
+    if self.has_destination_url_ and self.destination_url_ != x.destination_url_: return 0
+    if self.has_auth_domain_ != x.has_auth_domain_: return 0
+    if self.has_auth_domain_ and self.auth_domain_ != x.auth_domain_: return 0
+    return 1
+
+  def IsInitialized(self, debug_strs=None):
+    initialized = 1
+    if (not self.has_destination_url_):
+      initialized = 0
+      if debug_strs is not None:
+        debug_strs.append('Required field: destination_url not set.')
+    return initialized
+
+  def ByteSize(self):
+    n = 0
+    n += self.lengthString(len(self.destination_url_))
+    if (self.has_auth_domain_): n += 1 + self.lengthString(len(self.auth_domain_))
+    return n + 1
+
+  def Clear(self):
+    self.clear_destination_url()
+    self.clear_auth_domain()
+
+  def OutputUnchecked(self, out):
+    out.putVarInt32(10)
+    out.putPrefixedString(self.destination_url_)
+    if (self.has_auth_domain_):
+      out.putVarInt32(18)
+      out.putPrefixedString(self.auth_domain_)
+
+  def TryMerge(self, d):
+    while d.avail() > 0:
+      tt = d.getVarInt32()
+      if tt == 10:
+        self.set_destination_url(d.getPrefixedString())
+        continue
+      if tt == 18:
+        self.set_auth_domain(d.getPrefixedString())
+        continue
+      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+      d.skipData(tt)
+
+
+  def __str__(self, prefix="", printElemNumber=0):
+    res=""
+    if self.has_destination_url_: res+=prefix+("destination_url: %s\n" % self.DebugFormatString(self.destination_url_))
+    if self.has_auth_domain_: res+=prefix+("auth_domain: %s\n" % self.DebugFormatString(self.auth_domain_))
+    return res
+
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+  kdestination_url = 1
+  kauth_domain = 2
+
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    1: "destination_url",
+    2: "auth_domain",
+  }, 2)
+
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    1: ProtocolBuffer.Encoder.STRING,
+    2: ProtocolBuffer.Encoder.STRING,
+  }, 2, ProtocolBuffer.Encoder.MAX_TYPE)
+
+  _STYLE = """"""
+  _STYLE_CONTENT_TYPE = """"""
+class CreateLoginURLResponse(ProtocolBuffer.ProtocolMessage):
+  has_login_url_ = 0
+  login_url_ = ""
+
+  def __init__(self, contents=None):
+    if contents is not None: self.MergeFromString(contents)
+
+  def login_url(self): return self.login_url_
+
+  def set_login_url(self, x):
+    self.has_login_url_ = 1
+    self.login_url_ = x
+
+  def clear_login_url(self):
+    if self.has_login_url_:
+      self.has_login_url_ = 0
+      self.login_url_ = ""
+
+  def has_login_url(self): return self.has_login_url_
+
+
+  def MergeFrom(self, x):
+    assert x is not self
+    if (x.has_login_url()): self.set_login_url(x.login_url())
+
+  def Equals(self, x):
+    if x is self: return 1
+    if self.has_login_url_ != x.has_login_url_: return 0
+    if self.has_login_url_ and self.login_url_ != x.login_url_: return 0
+    return 1
+
+  def IsInitialized(self, debug_strs=None):
+    initialized = 1
+    if (not self.has_login_url_):
+      initialized = 0
+      if debug_strs is not None:
+        debug_strs.append('Required field: login_url not set.')
+    return initialized
+
+  def ByteSize(self):
+    n = 0
+    n += self.lengthString(len(self.login_url_))
+    return n + 1
+
+  def Clear(self):
+    self.clear_login_url()
+
+  def OutputUnchecked(self, out):
+    out.putVarInt32(10)
+    out.putPrefixedString(self.login_url_)
+
+  def TryMerge(self, d):
+    while d.avail() > 0:
+      tt = d.getVarInt32()
+      if tt == 10:
+        self.set_login_url(d.getPrefixedString())
+        continue
+      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+      d.skipData(tt)
+
+
+  def __str__(self, prefix="", printElemNumber=0):
+    res=""
+    if self.has_login_url_: res+=prefix+("login_url: %s\n" % self.DebugFormatString(self.login_url_))
+    return res
+
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+  klogin_url = 1
 
-  _TYPES = (
-   ProtocolBuffer.Encoder.NUMERIC,
-  )
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    1: "login_url",
+  }, 1)
+
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    1: ProtocolBuffer.Encoder.STRING,
+  }, 1, ProtocolBuffer.Encoder.MAX_TYPE)
+
+  _STYLE = """"""
+  _STYLE_CONTENT_TYPE = """"""
+class CreateLogoutURLRequest(ProtocolBuffer.ProtocolMessage):
+  has_destination_url_ = 0
+  destination_url_ = ""
+  has_auth_domain_ = 0
+  auth_domain_ = ""
+
+  def __init__(self, contents=None):
+    if contents is not None: self.MergeFromString(contents)
+
+  def destination_url(self): return self.destination_url_
+
+  def set_destination_url(self, x):
+    self.has_destination_url_ = 1
+    self.destination_url_ = x
+
+  def clear_destination_url(self):
+    if self.has_destination_url_:
+      self.has_destination_url_ = 0
+      self.destination_url_ = ""
+
+  def has_destination_url(self): return self.has_destination_url_
+
+  def auth_domain(self): return self.auth_domain_
+
+  def set_auth_domain(self, x):
+    self.has_auth_domain_ = 1
+    self.auth_domain_ = x
+
+  def clear_auth_domain(self):
+    if self.has_auth_domain_:
+      self.has_auth_domain_ = 0
+      self.auth_domain_ = ""
+
+  def has_auth_domain(self): return self.has_auth_domain_
+
+
+  def MergeFrom(self, x):
+    assert x is not self
+    if (x.has_destination_url()): self.set_destination_url(x.destination_url())
+    if (x.has_auth_domain()): self.set_auth_domain(x.auth_domain())
+
+  def Equals(self, x):
+    if x is self: return 1
+    if self.has_destination_url_ != x.has_destination_url_: return 0
+    if self.has_destination_url_ and self.destination_url_ != x.destination_url_: return 0
+    if self.has_auth_domain_ != x.has_auth_domain_: return 0
+    if self.has_auth_domain_ and self.auth_domain_ != x.auth_domain_: return 0
+    return 1
+
+  def IsInitialized(self, debug_strs=None):
+    initialized = 1
+    if (not self.has_destination_url_):
+      initialized = 0
+      if debug_strs is not None:
+        debug_strs.append('Required field: destination_url not set.')
+    return initialized
+
+  def ByteSize(self):
+    n = 0
+    n += self.lengthString(len(self.destination_url_))
+    if (self.has_auth_domain_): n += 1 + self.lengthString(len(self.auth_domain_))
+    return n + 1
+
+  def Clear(self):
+    self.clear_destination_url()
+    self.clear_auth_domain()
+
+  def OutputUnchecked(self, out):
+    out.putVarInt32(10)
+    out.putPrefixedString(self.destination_url_)
+    if (self.has_auth_domain_):
+      out.putVarInt32(18)
+      out.putPrefixedString(self.auth_domain_)
+
+  def TryMerge(self, d):
+    while d.avail() > 0:
+      tt = d.getVarInt32()
+      if tt == 10:
+        self.set_destination_url(d.getPrefixedString())
+        continue
+      if tt == 18:
+        self.set_auth_domain(d.getPrefixedString())
+        continue
+      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+      d.skipData(tt)
+
+
+  def __str__(self, prefix="", printElemNumber=0):
+    res=""
+    if self.has_destination_url_: res+=prefix+("destination_url: %s\n" % self.DebugFormatString(self.destination_url_))
+    if self.has_auth_domain_: res+=prefix+("auth_domain: %s\n" % self.DebugFormatString(self.auth_domain_))
+    return res
+
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+  kdestination_url = 1
+  kauth_domain = 2
+
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    1: "destination_url",
+    2: "auth_domain",
+  }, 2)
+
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    1: ProtocolBuffer.Encoder.STRING,
+    2: ProtocolBuffer.Encoder.STRING,
+  }, 2, ProtocolBuffer.Encoder.MAX_TYPE)
+
+  _STYLE = """"""
+  _STYLE_CONTENT_TYPE = """"""
+class CreateLogoutURLResponse(ProtocolBuffer.ProtocolMessage):
+  has_logout_url_ = 0
+  logout_url_ = ""
+
+  def __init__(self, contents=None):
+    if contents is not None: self.MergeFromString(contents)
+
+  def logout_url(self): return self.logout_url_
+
+  def set_logout_url(self, x):
+    self.has_logout_url_ = 1
+    self.logout_url_ = x
+
+  def clear_logout_url(self):
+    if self.has_logout_url_:
+      self.has_logout_url_ = 0
+      self.logout_url_ = ""
+
+  def has_logout_url(self): return self.has_logout_url_
+
+
+  def MergeFrom(self, x):
+    assert x is not self
+    if (x.has_logout_url()): self.set_logout_url(x.logout_url())
+
+  def Equals(self, x):
+    if x is self: return 1
+    if self.has_logout_url_ != x.has_logout_url_: return 0
+    if self.has_logout_url_ and self.logout_url_ != x.logout_url_: return 0
+    return 1
+
+  def IsInitialized(self, debug_strs=None):
+    initialized = 1
+    if (not self.has_logout_url_):
+      initialized = 0
+      if debug_strs is not None:
+        debug_strs.append('Required field: logout_url not set.')
+    return initialized
+
+  def ByteSize(self):
+    n = 0
+    n += self.lengthString(len(self.logout_url_))
+    return n + 1
+
+  def Clear(self):
+    self.clear_logout_url()
+
+  def OutputUnchecked(self, out):
+    out.putVarInt32(10)
+    out.putPrefixedString(self.logout_url_)
+
+  def TryMerge(self, d):
+    while d.avail() > 0:
+      tt = d.getVarInt32()
+      if tt == 10:
+        self.set_logout_url(d.getPrefixedString())
+        continue
+      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+      d.skipData(tt)
+
+
+  def __str__(self, prefix="", printElemNumber=0):
+    res=""
+    if self.has_logout_url_: res+=prefix+("logout_url: %s\n" % self.DebugFormatString(self.logout_url_))
+    return res
+
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+  klogout_url = 1
+
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    1: "logout_url",
+  }, 1)
+
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    1: ProtocolBuffer.Encoder.STRING,
+  }, 1, ProtocolBuffer.Encoder.MAX_TYPE)
 
   _STYLE = """"""
   _STYLE_CONTENT_TYPE = """"""
 
-__all__ = ['UserServiceError']
+__all__ = ['UserServiceError','CreateLoginURLRequest','CreateLoginURLResponse','CreateLogoutURLRequest','CreateLogoutURLResponse']
--- a/thirdparty/google_appengine/google/appengine/api/user_service_stub.py	Mon Sep 07 20:26:39 2009 +0200
+++ b/thirdparty/google_appengine/google/appengine/api/user_service_stub.py	Mon Sep 07 20:27:37 2009 +0200
@@ -65,9 +65,9 @@
       response: the login URL; a base.StringProto
     """
     self.__num_requests += 1
-    response.set_value(
+    response.set_login_url(
         self._login_url %
-        urllib.quote(self._AddHostToContinueURL(request.value())))
+        urllib.quote(self._AddHostToContinueURL(request.destination_url())))
 
   def _Dynamic_CreateLogoutURL(self, request, response):
     """Trivial implementation of UserService.CreateLogoutURL().
@@ -77,9 +77,9 @@
       response: the logout URL; a base.StringProto
     """
     self.__num_requests += 1
-    response.set_value(
+    response.set_logout_url(
         self._logout_url %
-        urllib.quote(self._AddHostToContinueURL(request.value())))
+        urllib.quote(self._AddHostToContinueURL(request.destination_url())))
 
   def _AddHostToContinueURL(self, continue_url):
     """Adds the request host to the continue url if no host is specified.
--- a/thirdparty/google_appengine/google/appengine/api/users.py	Mon Sep 07 20:26:39 2009 +0200
+++ b/thirdparty/google_appengine/google/appengine/api/users.py	Mon Sep 07 20:27:37 2009 +0200
@@ -162,9 +162,9 @@
   Returns:
     string
   """
-  req = user_service_pb.StringProto()
-  resp = user_service_pb.StringProto()
-  req.set_value(dest_url)
+  req = user_service_pb.CreateLoginURLRequest()
+  resp = user_service_pb.CreateLoginURLResponse()
+  req.set_destination_url(dest_url)
   try:
     apiproxy_stub_map.MakeSyncCall('user', 'CreateLoginURL', req, resp)
   except apiproxy_errors.ApplicationError, e:
@@ -176,7 +176,7 @@
       raise NotAllowedError
     else:
       raise e
-  return resp.value()
+  return resp.login_url()
 
 CreateLoginURL = create_login_url
 
@@ -192,9 +192,9 @@
   Returns:
     string
   """
-  req = user_service_pb.StringProto()
-  resp = user_service_pb.StringProto()
-  req.set_value(dest_url)
+  req = user_service_pb.CreateLogoutURLRequest()
+  resp = user_service_pb.CreateLogoutURLResponse()
+  req.set_destination_url(dest_url)
   try:
     apiproxy_stub_map.MakeSyncCall('user', 'CreateLogoutURL', req, resp)
   except apiproxy_errors.ApplicationError, e:
@@ -203,7 +203,7 @@
       raise RedirectTooLongError
     else:
       raise e
-  return resp.value()
+  return resp.logout_url()
 
 CreateLogoutURL = create_logout_url
 
--- a/thirdparty/google_appengine/google/appengine/api/validation.py	Mon Sep 07 20:26:39 2009 +0200
+++ b/thirdparty/google_appengine/google/appengine/api/validation.py	Mon Sep 07 20:27:37 2009 +0200
@@ -919,7 +919,9 @@
                             'but found \'%s\'.' % value)
 
     for item in value:
-      if not isinstance(item, self.constructor):
+      if isinstance(self.constructor, Validator):
+        item = self.constructor.Validate(item)
+      elif not isinstance(item, self.constructor):
         raise ValidationError('Repeated items must be %s, but found \'%s\'.'
                               % (str(self.constructor), str(item)))
 
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/thirdparty/google_appengine/google/appengine/api/xmpp/__init__.py	Mon Sep 07 20:27:37 2009 +0200
@@ -0,0 +1,332 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""XMPP API.
+
+This module allows AppEngine apps to interact with a bot representing that app
+on the Google Talk network.
+
+Functions defined in this module:
+  get_presence: Gets the presence for a JID.
+  send_message: Sends a chat message to any number of JIDs.
+  send_invite: Sends an invitation to chat to a JID.
+
+Classes defined in this module:
+  Message: A class to encapsulate received messages.
+"""
+
+
+
+from google.appengine.api import apiproxy_stub_map
+from google.appengine.api.xmpp import xmpp_service_pb
+from google.appengine.runtime import apiproxy_errors
+
+
+NO_ERROR    = xmpp_service_pb.XmppMessageResponse.NO_ERROR
+INVALID_JID = xmpp_service_pb.XmppMessageResponse.INVALID_JID
+OTHER_ERROR = xmpp_service_pb.XmppMessageResponse.OTHER_ERROR
+
+
+MESSAGE_TYPE_NONE = ""
+MESSAGE_TYPE_CHAT = "chat"
+MESSAGE_TYPE_ERROR = "error"
+MESSAGE_TYPE_GROUPCHAT = "groupchat"
+MESSAGE_TYPE_HEADLINE = "headline"
+MESSAGE_TYPE_NORMAL = "normal"
+
+_VALID_MESSAGE_TYPES = frozenset([MESSAGE_TYPE_NONE, MESSAGE_TYPE_CHAT,
+                                  MESSAGE_TYPE_ERROR, MESSAGE_TYPE_GROUPCHAT,
+                                  MESSAGE_TYPE_HEADLINE, MESSAGE_TYPE_NORMAL])
+
+
+class Error(Exception):
+  """Base error class for this module."""
+
+
+class InvalidJidError(Error):
+  """Error that indicates a request for an invalid JID."""
+
+
+class InvalidTypeError(Error):
+  """Error that indicates a send message request has an invalid type."""
+
+
+class InvalidXmlError(Error):
+  """Error that indicates a send message request has invalid XML."""
+
+
+class NoBodyError(Error):
+  """Error that indicates a send message request has no body."""
+
+
+class InvalidMessageError(Error):
+  """Error that indicates a received message was invalid or incomplete."""
+
+
+def get_presence(jid, from_jid=None):
+  """Gets the presence for a JID.
+
+  Args:
+    jid: The JID of the contact whose presence is requested.
+    from_jid: The optional custom JID to use for sending. Currently, the default
+      is <appid>@appspot.com. This is supported as a value. Custom JIDs can be
+      of the form <anything>@<appid>.appspotchat.com.
+
+  Returns:
+    bool, Whether the user is online.
+
+  Raises:
+    InvalidJidError if any of the JIDs passed are invalid.
+    Error if an unspecified error happens processing the request.
+  """
+  if not jid:
+    raise InvalidJidError()
+
+  request = xmpp_service_pb.PresenceRequest()
+  response = xmpp_service_pb.PresenceResponse()
+
+  request.set_jid(_to_str(jid))
+  if from_jid:
+    request.set_from_jid(_to_str(from_jid))
+
+  try:
+    apiproxy_stub_map.MakeSyncCall("xmpp",
+                                   "GetPresence",
+                                   request,
+                                   response)
+  except apiproxy_errors.ApplicationError, e:
+    if (e.application_error ==
+        xmpp_service_pb.XmppServiceError.INVALID_JID):
+      raise InvalidJidError()
+    else:
+      raise Error()
+
+  return bool(response.is_available())
+
+
+def send_invite(jid, from_jid=None):
+  """Sends an invitation to chat to a JID.
+
+  Args:
+    jid: The JID of the contact to invite.
+    from_jid: The optional custom JID to use for sending. Currently, the default
+      is <appid>@appspot.com. This is supported as a value. Custom JIDs can be
+      of the form <anything>@<appid>.appspotchat.com.
+
+  Raises:
+    InvalidJidError if the JID passed is invalid.
+    Error if an unspecified error happens processing the request.
+  """
+  if not jid:
+    raise InvalidJidError()
+
+  request = xmpp_service_pb.XmppInviteRequest()
+  response = xmpp_service_pb.XmppInviteResponse()
+
+  request.set_jid(_to_str(jid))
+  if from_jid:
+    request.set_from_jid(_to_str(from_jid))
+
+  try:
+    apiproxy_stub_map.MakeSyncCall("xmpp",
+                                   "SendInvite",
+                                   request,
+                                   response)
+  except apiproxy_errors.ApplicationError, e:
+    if (e.application_error ==
+        xmpp_service_pb.XmppServiceError.INVALID_JID):
+      raise InvalidJidError()
+    else:
+      raise Error()
+
+  return
+
+
+def send_message(jids, body, from_jid=None, message_type=MESSAGE_TYPE_CHAT,
+                 raw_xml=False):
+  """Sends a chat message to a list of JIDs.
+
+  Args:
+    jids: A list of JIDs to send the message to, or a single JID to send the
+      message to.
+    from_jid: The optional custom JID to use for sending. Currently, the default
+      is <appid>@appspot.com. This is supported as a value. Custom JIDs can be
+      of the form <anything>@<appid>.appspotchat.com.
+    body: The body of the message.
+    message_type: Optional type of the message. Should be one of the types
+      specified in RFC 3921, section 2.1.1. An empty string will result in a
+      message stanza without a type attribute. For convenience, all of the
+      valid types are in the MESSAGE_TYPE_* constants in this file. The
+      default is MESSAGE_TYPE_CHAT. Anything else will throw an exception.
+    raw_xml: Optionally specifies that the body should be interpreted as XML. If
+      this is false, the contents of the body will be escaped and placed inside
+      of a body element inside of the message. If this is true, the contents
+      will be made children of the message.
+
+  Returns:
+    list, A list of statuses, one for each JID, corresponding to the result of
+      sending the message to that JID. Or, if a single JID was passed in,
+      returns the status directly.
+
+  Raises:
+    InvalidJidError if there is no valid JID in the list.
+    InvalidTypeError if the type argument is invalid.
+    InvalidXmlError if the body is malformed XML and raw_xml is True.
+    NoBodyError if there is no body.
+    Error if another error occurs processing the request.
+  """
+  request = xmpp_service_pb.XmppMessageRequest()
+  response = xmpp_service_pb.XmppMessageResponse()
+
+  if not body:
+    raise NoBodyError()
+
+  if not jids:
+    raise InvalidJidError()
+
+  if not message_type in _VALID_MESSAGE_TYPES:
+    raise InvalidTypeError()
+
+  single_jid = False
+  if isinstance(jids, basestring):
+    single_jid = True
+    jids = [jids]
+
+  for jid in jids:
+    if not jid:
+      raise InvalidJidError()
+    request.add_jid(_to_str(jid))
+
+  request.set_body(_to_str(body))
+  request.set_type(_to_str(message_type))
+  request.set_raw_xml(raw_xml)
+  if from_jid:
+    request.set_from_jid(_to_str(from_jid))
+
+  try:
+    apiproxy_stub_map.MakeSyncCall("xmpp",
+                                   "SendMessage",
+                                   request,
+                                   response)
+  except apiproxy_errors.ApplicationError, e:
+    if (e.application_error ==
+        xmpp_service_pb.XmppServiceError.INVALID_JID):
+      raise InvalidJidError()
+    elif (e.application_error ==
+          xmpp_service_pb.XmppServiceError.INVALID_TYPE):
+      raise InvalidTypeError()
+    elif (e.application_error ==
+          xmpp_service_pb.XmppServiceError.INVALID_XML):
+      raise InvalidXmlError()
+    elif (e.application_error ==
+          xmpp_service_pb.XmppServiceError.NO_BODY):
+      raise NoBodyError()
+    raise Error()
+
+  if single_jid:
+    return response.status_list()[0]
+  return response.status_list()
+
+
+class Message(object):
+  """Encapsulates an XMPP message received by the application."""
+
+  def __init__(self, vars):
+    """Constructs a new XMPP Message from an HTTP request.
+
+    Args:
+      vars: A dict-like object to extract message arguments from.
+    """
+    try:
+      self.__sender = vars["from"]
+      self.__to = vars["to"]
+      self.__body = vars["body"]
+    except KeyError, e:
+      raise InvalidMessageError(e[0])
+    self.__command = None
+    self.__arg = None
+
+  @property
+  def sender(self):
+    return self.__sender
+
+  @property
+  def to(self):
+    return self.__to
+
+  @property
+  def body(self):
+    return self.__body
+
+  def __parse_command(self):
+    if self.__arg != None:
+      return
+
+    body = self.__body
+    if body.startswith('\\'):
+      body = '/' + body[1:]
+
+    self.__arg = ''
+    if body.startswith('/'):
+      parts = body.split(' ', 1)
+      self.__command = parts[0][1:]
+      if len(parts) > 1:
+        self.__arg = parts[1].strip()
+    else:
+      self.__arg = self.__body.strip()
+
+  @property
+  def command(self):
+    self.__parse_command()
+    return self.__command
+
+  @property
+  def arg(self):
+    self.__parse_command()
+    return self.__arg
+
+  def reply(self, body, message_type=MESSAGE_TYPE_CHAT, raw_xml=False,
+            send_message=send_message):
+    """Convenience function to reply to a message.
+
+    Args:
+      body: str: The body of the message
+      message_type, raw_xml: As per send_message.
+      send_message: Used for testing.
+
+    Returns:
+      A status code as per send_message.
+
+    Raises:
+      See send_message.
+    """
+    return send_message([self.sender], body, from_jid=self.to,
+                        message_type=message_type, raw_xml=raw_xml)
+
+
+def _to_str(value):
+  """Helper function to make sure unicode values converted to utf-8
+
+  Args:
+    value: str or unicode to convert to utf-8.
+
+  Returns:
+    UTF-8 encoded str of value, otherwise value unchanged.
+  """
+  if isinstance(value, unicode):
+    return value.encode('utf-8')
+  return value
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/thirdparty/google_appengine/google/appengine/api/xmpp/xmpp_service_pb.py	Mon Sep 07 20:27:37 2009 +0200
@@ -0,0 +1,826 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+from google.net.proto import ProtocolBuffer
+import array
+import dummy_thread as thread
+
+__pychecker__ = """maxreturns=0 maxbranches=0 no-callinit
+                   unusednames=printElemNumber,debug_strs no-special"""
+
class XmppServiceError(ProtocolBuffer.ProtocolMessage):
  """Generated protocol message carrying the XMPP service error codes.

  The message itself has no fields; the class mainly exposes the ErrorCode
  enumeration values and their printable names.  NOTE(review): this module
  looks machine-generated from a .proto definition — edit the source proto,
  not this file.
  """

  # ErrorCode enumeration values.
  UNSPECIFIED_ERROR =    1
  INVALID_JID  =    2
  NO_BODY      =    3
  INVALID_XML  =    4
  INVALID_TYPE =    5

  # Maps each ErrorCode value to its symbolic name.
  _ErrorCode_NAMES = {
    1: "UNSPECIFIED_ERROR",
    2: "INVALID_JID",
    3: "NO_BODY",
    4: "INVALID_XML",
    5: "INVALID_TYPE",
  }

  def ErrorCode_Name(cls, x): return cls._ErrorCode_NAMES.get(x, "")
  ErrorCode_Name = classmethod(ErrorCode_Name)


  def __init__(self, contents=None):
    pass
    if contents is not None: self.MergeFromString(contents)


  def MergeFrom(self, x):
    # No fields to merge; only guard against self-merge.
    assert x is not self

  def Equals(self, x):
    # All instances are equal since the message carries no fields.
    if x is self: return 1
    return 1

  def IsInitialized(self, debug_strs=None):
    initialized = 1
    return initialized

  def ByteSize(self):
    # Empty message: serialized size is always zero.
    n = 0
    return n + 0

  def Clear(self):
    pass

  def OutputUnchecked(self, out):
    pass

  def TryMerge(self, d):
    # Skip over any unknown fields in the input buffer.
    while d.avail() > 0:
      tt = d.getVarInt32()
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)


  def __str__(self, prefix="", printElemNumber=0):
    res=""
    return res


  def _BuildTagLookupTable(sparse, maxtag, default=None):
    # Expand a sparse {tag: value} dict into a dense tuple indexed by tag.
    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])


  # Tag-number -> field-name / wire-type tables used by the framework.
  _TEXT = _BuildTagLookupTable({
    0: "ErrorCode",
  }, 0)

  _TYPES = _BuildTagLookupTable({
    0: ProtocolBuffer.Encoder.NUMERIC,
  }, 0, ProtocolBuffer.Encoder.MAX_TYPE)

  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
class PresenceRequest(ProtocolBuffer.ProtocolMessage):
  """Generated request message for the XMPP GetPresence call.

  Fields:
    jid: required string (tag 1), the JID whose presence is queried.
    from_jid: optional string (tag 2), the JID making the query.

  NOTE(review): machine-generated protobuf code; do not hand-edit logic.
  """
  # has_*_ flags track explicit field presence; *_ hold the values/defaults.
  has_jid_ = 0
  jid_ = ""
  has_from_jid_ = 0
  from_jid_ = ""

  def __init__(self, contents=None):
    if contents is not None: self.MergeFromString(contents)

  def jid(self): return self.jid_

  def set_jid(self, x):
    self.has_jid_ = 1
    self.jid_ = x

  def clear_jid(self):
    if self.has_jid_:
      self.has_jid_ = 0
      self.jid_ = ""

  def has_jid(self): return self.has_jid_

  def from_jid(self): return self.from_jid_

  def set_from_jid(self, x):
    self.has_from_jid_ = 1
    self.from_jid_ = x

  def clear_from_jid(self):
    if self.has_from_jid_:
      self.has_from_jid_ = 0
      self.from_jid_ = ""

  def has_from_jid(self): return self.has_from_jid_


  def MergeFrom(self, x):
    # Copy every field that is set on x into self.
    assert x is not self
    if (x.has_jid()): self.set_jid(x.jid())
    if (x.has_from_jid()): self.set_from_jid(x.from_jid())

  def Equals(self, x):
    if x is self: return 1
    if self.has_jid_ != x.has_jid_: return 0
    if self.has_jid_ and self.jid_ != x.jid_: return 0
    if self.has_from_jid_ != x.has_from_jid_: return 0
    if self.has_from_jid_ and self.from_jid_ != x.from_jid_: return 0
    return 1

  def IsInitialized(self, debug_strs=None):
    # jid is the only required field.
    initialized = 1
    if (not self.has_jid_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: jid not set.')
    return initialized

  def ByteSize(self):
    n = 0
    n += self.lengthString(len(self.jid_))
    if (self.has_from_jid_): n += 1 + self.lengthString(len(self.from_jid_))
    return n + 1

  def OutputUnchecked(self, out):
    # Field headers 10 / 18 precede the length-prefixed jid / from_jid.
    out.putVarInt32(10)
    out.putPrefixedString(self.jid_)
    if (self.has_from_jid_):
      out.putVarInt32(18)
      out.putPrefixedString(self.from_jid_)

  def Clear(self):
    self.clear_jid()
    self.clear_from_jid()

  def TryMerge(self, d):
    # Decode fields until the buffer is exhausted; unknown tags are skipped.
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 10:
        self.set_jid(d.getPrefixedString())
        continue
      if tt == 18:
        self.set_from_jid(d.getPrefixedString())
        continue
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)


  def __str__(self, prefix="", printElemNumber=0):
    res=""
    if self.has_jid_: res+=prefix+("jid: %s\n" % self.DebugFormatString(self.jid_))
    if self.has_from_jid_: res+=prefix+("from_jid: %s\n" % self.DebugFormatString(self.from_jid_))
    return res


  def _BuildTagLookupTable(sparse, maxtag, default=None):
    # Expand a sparse {tag: value} dict into a dense tuple indexed by tag.
    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])

  # Field tag numbers.
  kjid = 1
  kfrom_jid = 2

  _TEXT = _BuildTagLookupTable({
    0: "ErrorCode",
    1: "jid",
    2: "from_jid",
  }, 2)

  _TYPES = _BuildTagLookupTable({
    0: ProtocolBuffer.Encoder.NUMERIC,
    1: ProtocolBuffer.Encoder.STRING,
    2: ProtocolBuffer.Encoder.STRING,
  }, 2, ProtocolBuffer.Encoder.MAX_TYPE)

  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
class PresenceResponse(ProtocolBuffer.ProtocolMessage):
  """Generated response message for the XMPP GetPresence call.

  Fields:
    is_available: required bool (tag 1), whether the user is online.
    presence: optional int (tag 2), one of the SHOW enumeration values.

  NOTE(review): machine-generated protobuf code; do not hand-edit logic.
  """

  # SHOW enumeration values for the 'presence' field.
  NORMAL       =    0
  AWAY         =    1
  DO_NOT_DISTURB =    2
  CHAT         =    3
  EXTENDED_AWAY =    4

  # Maps each SHOW value to its symbolic name.
  _SHOW_NAMES = {
    0: "NORMAL",
    1: "AWAY",
    2: "DO_NOT_DISTURB",
    3: "CHAT",
    4: "EXTENDED_AWAY",
  }

  def SHOW_Name(cls, x): return cls._SHOW_NAMES.get(x, "")
  SHOW_Name = classmethod(SHOW_Name)

  # has_*_ flags track explicit field presence; *_ hold the values/defaults.
  has_is_available_ = 0
  is_available_ = 0
  has_presence_ = 0
  presence_ = 0

  def __init__(self, contents=None):
    if contents is not None: self.MergeFromString(contents)

  def is_available(self): return self.is_available_

  def set_is_available(self, x):
    self.has_is_available_ = 1
    self.is_available_ = x

  def clear_is_available(self):
    if self.has_is_available_:
      self.has_is_available_ = 0
      self.is_available_ = 0

  def has_is_available(self): return self.has_is_available_

  def presence(self): return self.presence_

  def set_presence(self, x):
    self.has_presence_ = 1
    self.presence_ = x

  def clear_presence(self):
    if self.has_presence_:
      self.has_presence_ = 0
      self.presence_ = 0

  def has_presence(self): return self.has_presence_


  def MergeFrom(self, x):
    # Copy every field that is set on x into self.
    assert x is not self
    if (x.has_is_available()): self.set_is_available(x.is_available())
    if (x.has_presence()): self.set_presence(x.presence())

  def Equals(self, x):
    if x is self: return 1
    if self.has_is_available_ != x.has_is_available_: return 0
    if self.has_is_available_ and self.is_available_ != x.is_available_: return 0
    if self.has_presence_ != x.has_presence_: return 0
    if self.has_presence_ and self.presence_ != x.presence_: return 0
    return 1

  def IsInitialized(self, debug_strs=None):
    # is_available is the only required field.
    initialized = 1
    if (not self.has_is_available_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: is_available not set.')
    return initialized

  def ByteSize(self):
    n = 0
    if (self.has_presence_): n += 1 + self.lengthVarInt64(self.presence_)
    return n + 2

  def Clear(self):
    self.clear_is_available()
    self.clear_presence()

  def OutputUnchecked(self, out):
    # Field headers 8 / 16 precede the is_available / presence varints.
    out.putVarInt32(8)
    out.putBoolean(self.is_available_)
    if (self.has_presence_):
      out.putVarInt32(16)
      out.putVarInt32(self.presence_)

  def TryMerge(self, d):
    # Decode fields until the buffer is exhausted; unknown tags are skipped.
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 8:
        self.set_is_available(d.getBoolean())
        continue
      if tt == 16:
        self.set_presence(d.getVarInt32())
        continue
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)


  def __str__(self, prefix="", printElemNumber=0):
    res=""
    if self.has_is_available_: res+=prefix+("is_available: %s\n" % self.DebugFormatBool(self.is_available_))
    if self.has_presence_: res+=prefix+("presence: %s\n" % self.DebugFormatInt32(self.presence_))
    return res


  def _BuildTagLookupTable(sparse, maxtag, default=None):
    # Expand a sparse {tag: value} dict into a dense tuple indexed by tag.
    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])

  # Field tag numbers.
  kis_available = 1
  kpresence = 2

  _TEXT = _BuildTagLookupTable({
    0: "ErrorCode",
    1: "is_available",
    2: "presence",
  }, 2)

  _TYPES = _BuildTagLookupTable({
    0: ProtocolBuffer.Encoder.NUMERIC,
    1: ProtocolBuffer.Encoder.NUMERIC,
    2: ProtocolBuffer.Encoder.NUMERIC,
  }, 2, ProtocolBuffer.Encoder.MAX_TYPE)

  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
class XmppMessageRequest(ProtocolBuffer.ProtocolMessage):
  """Generated request message for the XMPP SendMessage call.

  Fields:
    jid: repeated string (tag 1), recipient JIDs.
    body: required string (tag 2), the message body.
    raw_xml: optional bool (tag 3), whether body is raw XML.
    type: optional string (tag 4), stanza type; defaults to "chat".
    from_jid: optional string (tag 5), the sending JID.

  NOTE(review): machine-generated protobuf code; do not hand-edit logic.
  """
  # has_*_ flags track explicit field presence; *_ hold the values/defaults.
  has_body_ = 0
  body_ = ""
  has_raw_xml_ = 0
  raw_xml_ = 0
  has_type_ = 0
  type_ = "chat"
  has_from_jid_ = 0
  from_jid_ = ""

  def __init__(self, contents=None):
    # jid is repeated, so it needs a fresh per-instance list.
    self.jid_ = []
    if contents is not None: self.MergeFromString(contents)

  def jid_size(self): return len(self.jid_)
  def jid_list(self): return self.jid_

  def jid(self, i):
    return self.jid_[i]

  def set_jid(self, i, x):
    self.jid_[i] = x

  def add_jid(self, x):
    self.jid_.append(x)

  def clear_jid(self):
    self.jid_ = []

  def body(self): return self.body_

  def set_body(self, x):
    self.has_body_ = 1
    self.body_ = x

  def clear_body(self):
    if self.has_body_:
      self.has_body_ = 0
      self.body_ = ""

  def has_body(self): return self.has_body_

  def raw_xml(self): return self.raw_xml_

  def set_raw_xml(self, x):
    self.has_raw_xml_ = 1
    self.raw_xml_ = x

  def clear_raw_xml(self):
    if self.has_raw_xml_:
      self.has_raw_xml_ = 0
      self.raw_xml_ = 0

  def has_raw_xml(self): return self.has_raw_xml_

  def type(self): return self.type_

  def set_type(self, x):
    self.has_type_ = 1
    self.type_ = x

  def clear_type(self):
    if self.has_type_:
      self.has_type_ = 0
      self.type_ = "chat"

  def has_type(self): return self.has_type_

  def from_jid(self): return self.from_jid_

  def set_from_jid(self, x):
    self.has_from_jid_ = 1
    self.from_jid_ = x

  def clear_from_jid(self):
    if self.has_from_jid_:
      self.has_from_jid_ = 0
      self.from_jid_ = ""

  def has_from_jid(self): return self.has_from_jid_


  def MergeFrom(self, x):
    # Append x's repeated jids and copy every scalar field that is set on x.
    assert x is not self
    for i in xrange(x.jid_size()): self.add_jid(x.jid(i))
    if (x.has_body()): self.set_body(x.body())
    if (x.has_raw_xml()): self.set_raw_xml(x.raw_xml())
    if (x.has_type()): self.set_type(x.type())
    if (x.has_from_jid()): self.set_from_jid(x.from_jid())

  def Equals(self, x):
    if x is self: return 1
    if len(self.jid_) != len(x.jid_): return 0
    for e1, e2 in zip(self.jid_, x.jid_):
      if e1 != e2: return 0
    if self.has_body_ != x.has_body_: return 0
    if self.has_body_ and self.body_ != x.body_: return 0
    if self.has_raw_xml_ != x.has_raw_xml_: return 0
    if self.has_raw_xml_ and self.raw_xml_ != x.raw_xml_: return 0
    if self.has_type_ != x.has_type_: return 0
    if self.has_type_ and self.type_ != x.type_: return 0
    if self.has_from_jid_ != x.has_from_jid_: return 0
    if self.has_from_jid_ and self.from_jid_ != x.from_jid_: return 0
    return 1

  def IsInitialized(self, debug_strs=None):
    # body is the only required field.
    initialized = 1
    if (not self.has_body_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: body not set.')
    return initialized

  def ByteSize(self):
    n = 0
    n += 1 * len(self.jid_)
    for i in xrange(len(self.jid_)): n += self.lengthString(len(self.jid_[i]))
    n += self.lengthString(len(self.body_))
    if (self.has_raw_xml_): n += 2
    if (self.has_type_): n += 1 + self.lengthString(len(self.type_))
    if (self.has_from_jid_): n += 1 + self.lengthString(len(self.from_jid_))
    return n + 1

  def Clear(self):
    self.clear_jid()
    self.clear_body()
    self.clear_raw_xml()
    self.clear_type()
    self.clear_from_jid()

  def OutputUnchecked(self, out):
    # Field headers 10/18/24/34/42 precede jid/body/raw_xml/type/from_jid.
    for i in xrange(len(self.jid_)):
      out.putVarInt32(10)
      out.putPrefixedString(self.jid_[i])
    out.putVarInt32(18)
    out.putPrefixedString(self.body_)
    if (self.has_raw_xml_):
      out.putVarInt32(24)
      out.putBoolean(self.raw_xml_)
    if (self.has_type_):
      out.putVarInt32(34)
      out.putPrefixedString(self.type_)
    if (self.has_from_jid_):
      out.putVarInt32(42)
      out.putPrefixedString(self.from_jid_)

  def TryMerge(self, d):
    # Decode fields until the buffer is exhausted; unknown tags are skipped.
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 10:
        self.add_jid(d.getPrefixedString())
        continue
      if tt == 18:
        self.set_body(d.getPrefixedString())
        continue
      if tt == 24:
        self.set_raw_xml(d.getBoolean())
        continue
      if tt == 34:
        self.set_type(d.getPrefixedString())
        continue
      if tt == 42:
        self.set_from_jid(d.getPrefixedString())
        continue
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)


  def __str__(self, prefix="", printElemNumber=0):
    res=""
    cnt=0
    for e in self.jid_:
      elm=""
      if printElemNumber: elm="(%d)" % cnt
      res+=prefix+("jid%s: %s\n" % (elm, self.DebugFormatString(e)))
      cnt+=1
    if self.has_body_: res+=prefix+("body: %s\n" % self.DebugFormatString(self.body_))
    if self.has_raw_xml_: res+=prefix+("raw_xml: %s\n" % self.DebugFormatBool(self.raw_xml_))
    if self.has_type_: res+=prefix+("type: %s\n" % self.DebugFormatString(self.type_))
    if self.has_from_jid_: res+=prefix+("from_jid: %s\n" % self.DebugFormatString(self.from_jid_))
    return res


  def _BuildTagLookupTable(sparse, maxtag, default=None):
    # Expand a sparse {tag: value} dict into a dense tuple indexed by tag.
    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])

  # Field tag numbers.
  kjid = 1
  kbody = 2
  kraw_xml = 3
  ktype = 4
  kfrom_jid = 5

  _TEXT = _BuildTagLookupTable({
    0: "ErrorCode",
    1: "jid",
    2: "body",
    3: "raw_xml",
    4: "type",
    5: "from_jid",
  }, 5)

  _TYPES = _BuildTagLookupTable({
    0: ProtocolBuffer.Encoder.NUMERIC,
    1: ProtocolBuffer.Encoder.STRING,
    2: ProtocolBuffer.Encoder.STRING,
    3: ProtocolBuffer.Encoder.NUMERIC,
    4: ProtocolBuffer.Encoder.STRING,
    5: ProtocolBuffer.Encoder.STRING,
  }, 5, ProtocolBuffer.Encoder.MAX_TYPE)

  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
class XmppMessageResponse(ProtocolBuffer.ProtocolMessage):
  """Generated response message for the XMPP SendMessage call.

  Fields:
    status: repeated int (tag 1), one XmppMessageStatus value per
      recipient JID of the corresponding request.

  NOTE(review): machine-generated protobuf code; do not hand-edit logic.
  """

  # XmppMessageStatus enumeration values.
  NO_ERROR     =    0
  INVALID_JID  =    1
  OTHER_ERROR  =    2

  # Maps each XmppMessageStatus value to its symbolic name.
  _XmppMessageStatus_NAMES = {
    0: "NO_ERROR",
    1: "INVALID_JID",
    2: "OTHER_ERROR",
  }

  def XmppMessageStatus_Name(cls, x): return cls._XmppMessageStatus_NAMES.get(x, "")
  XmppMessageStatus_Name = classmethod(XmppMessageStatus_Name)


  def __init__(self, contents=None):
    # status is repeated, so it needs a fresh per-instance list.
    self.status_ = []
    if contents is not None: self.MergeFromString(contents)

  def status_size(self): return len(self.status_)
  def status_list(self): return self.status_

  def status(self, i):
    return self.status_[i]

  def set_status(self, i, x):
    self.status_[i] = x

  def add_status(self, x):
    self.status_.append(x)

  def clear_status(self):
    self.status_ = []


  def MergeFrom(self, x):
    # Append all of x's status values to self.
    assert x is not self
    for i in xrange(x.status_size()): self.add_status(x.status(i))

  def Equals(self, x):
    if x is self: return 1
    if len(self.status_) != len(x.status_): return 0
    for e1, e2 in zip(self.status_, x.status_):
      if e1 != e2: return 0
    return 1

  def IsInitialized(self, debug_strs=None):
    # No required fields.
    initialized = 1
    return initialized

  def ByteSize(self):
    n = 0
    n += 1 * len(self.status_)
    for i in xrange(len(self.status_)): n += self.lengthVarInt64(self.status_[i])
    return n + 0

  def Clear(self):
    self.clear_status()

  def OutputUnchecked(self, out):
    # Field header 8 precedes each status varint.
    for i in xrange(len(self.status_)):
      out.putVarInt32(8)
      out.putVarInt32(self.status_[i])

  def TryMerge(self, d):
    # Decode fields until the buffer is exhausted; unknown tags are skipped.
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 8:
        self.add_status(d.getVarInt32())
        continue
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)


  def __str__(self, prefix="", printElemNumber=0):
    res=""
    cnt=0
    for e in self.status_:
      elm=""
      if printElemNumber: elm="(%d)" % cnt
      res+=prefix+("status%s: %s\n" % (elm, self.DebugFormatInt32(e)))
      cnt+=1
    return res


  def _BuildTagLookupTable(sparse, maxtag, default=None):
    # Expand a sparse {tag: value} dict into a dense tuple indexed by tag.
    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])

  # Field tag numbers.
  kstatus = 1

  _TEXT = _BuildTagLookupTable({
    0: "ErrorCode",
    1: "status",
  }, 1)

  _TYPES = _BuildTagLookupTable({
    0: ProtocolBuffer.Encoder.NUMERIC,
    1: ProtocolBuffer.Encoder.NUMERIC,
  }, 1, ProtocolBuffer.Encoder.MAX_TYPE)

  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
class XmppInviteRequest(ProtocolBuffer.ProtocolMessage):
  """Generated request message for the XMPP SendInvite call.

  Fields:
    jid: required string (tag 1), the JID to invite.
    from_jid: optional string (tag 2), the JID sending the invitation.

  NOTE(review): machine-generated protobuf code; do not hand-edit logic.
  """
  # has_*_ flags track explicit field presence; *_ hold the values/defaults.
  has_jid_ = 0
  jid_ = ""
  has_from_jid_ = 0
  from_jid_ = ""

  def __init__(self, contents=None):
    if contents is not None: self.MergeFromString(contents)

  def jid(self): return self.jid_

  def set_jid(self, x):
    self.has_jid_ = 1
    self.jid_ = x

  def clear_jid(self):
    if self.has_jid_:
      self.has_jid_ = 0
      self.jid_ = ""

  def has_jid(self): return self.has_jid_

  def from_jid(self): return self.from_jid_

  def set_from_jid(self, x):
    self.has_from_jid_ = 1
    self.from_jid_ = x

  def clear_from_jid(self):
    if self.has_from_jid_:
      self.has_from_jid_ = 0
      self.from_jid_ = ""

  def has_from_jid(self): return self.has_from_jid_


  def MergeFrom(self, x):
    # Copy every field that is set on x into self.
    assert x is not self
    if (x.has_jid()): self.set_jid(x.jid())
    if (x.has_from_jid()): self.set_from_jid(x.from_jid())

  def Equals(self, x):
    if x is self: return 1
    if self.has_jid_ != x.has_jid_: return 0
    if self.has_jid_ and self.jid_ != x.jid_: return 0
    if self.has_from_jid_ != x.has_from_jid_: return 0
    if self.has_from_jid_ and self.from_jid_ != x.from_jid_: return 0
    return 1

  def IsInitialized(self, debug_strs=None):
    # jid is the only required field.
    initialized = 1
    if (not self.has_jid_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: jid not set.')
    return initialized

  def ByteSize(self):
    n = 0
    n += self.lengthString(len(self.jid_))
    if (self.has_from_jid_): n += 1 + self.lengthString(len(self.from_jid_))
    return n + 1

  def Clear(self):
    self.clear_jid()
    self.clear_from_jid()

  def OutputUnchecked(self, out):
    # Field headers 10 / 18 precede the length-prefixed jid / from_jid.
    out.putVarInt32(10)
    out.putPrefixedString(self.jid_)
    if (self.has_from_jid_):
      out.putVarInt32(18)
      out.putPrefixedString(self.from_jid_)

  def TryMerge(self, d):
    # Decode fields until the buffer is exhausted; unknown tags are skipped.
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 10:
        self.set_jid(d.getPrefixedString())
        continue
      if tt == 18:
        self.set_from_jid(d.getPrefixedString())
        continue
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)


  def __str__(self, prefix="", printElemNumber=0):
    res=""
    if self.has_jid_: res+=prefix+("jid: %s\n" % self.DebugFormatString(self.jid_))
    if self.has_from_jid_: res+=prefix+("from_jid: %s\n" % self.DebugFormatString(self.from_jid_))
    return res


  def _BuildTagLookupTable(sparse, maxtag, default=None):
    # Expand a sparse {tag: value} dict into a dense tuple indexed by tag.
    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])

  # Field tag numbers.
  kjid = 1
  kfrom_jid = 2

  _TEXT = _BuildTagLookupTable({
    0: "ErrorCode",
    1: "jid",
    2: "from_jid",
  }, 2)

  _TYPES = _BuildTagLookupTable({
    0: ProtocolBuffer.Encoder.NUMERIC,
    1: ProtocolBuffer.Encoder.STRING,
    2: ProtocolBuffer.Encoder.STRING,
  }, 2, ProtocolBuffer.Encoder.MAX_TYPE)

  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
class XmppInviteResponse(ProtocolBuffer.ProtocolMessage):
  """Generated response message for the XMPP SendInvite call.

  The message carries no fields.  NOTE(review): machine-generated protobuf
  code; do not hand-edit logic.
  """

  def __init__(self, contents=None):
    pass
    if contents is not None: self.MergeFromString(contents)


  def MergeFrom(self, x):
    # No fields to merge; only guard against self-merge.
    assert x is not self

  def Equals(self, x):
    # All instances are equal since the message carries no fields.
    if x is self: return 1
    return 1

  def IsInitialized(self, debug_strs=None):
    initialized = 1
    return initialized

  def ByteSize(self):
    # Empty message: serialized size is always zero.
    n = 0
    return n + 0

  def Clear(self):
    pass

  def OutputUnchecked(self, out):
    pass

  def TryMerge(self, d):
    # Skip over any unknown fields in the input buffer.
    while d.avail() > 0:
      tt = d.getVarInt32()
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)


  def __str__(self, prefix="", printElemNumber=0):
    res=""
    return res


  def _BuildTagLookupTable(sparse, maxtag, default=None):
    # Expand a sparse {tag: value} dict into a dense tuple indexed by tag.
    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])


  _TEXT = _BuildTagLookupTable({
    0: "ErrorCode",
  }, 0)

  _TYPES = _BuildTagLookupTable({
    0: ProtocolBuffer.Encoder.NUMERIC,
  }, 0, ProtocolBuffer.Encoder.MAX_TYPE)

  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
+
# Public API of this generated module.
__all__ = ['XmppServiceError','PresenceRequest','PresenceResponse','XmppMessageRequest','XmppMessageResponse','XmppInviteRequest','XmppInviteResponse']
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/thirdparty/google_appengine/google/appengine/api/xmpp/xmpp_service_stub.py	Mon Sep 07 20:27:37 2009 +0200
@@ -0,0 +1,154 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""Stub version of the XMPP API, writes messages to logs."""
+
+
+
+
+
+import logging
+import os
+
+from google.appengine.api import apiproxy_stub
+from google.appengine.api import xmpp
+from google.appengine.api.xmpp import xmpp_service_pb
+
+
class XmppServiceStub(apiproxy_stub.APIProxyStub):
  """Python only xmpp service stub.

  This stub does not use an XMPP network. It prints messages to the console
  instead of sending any stanzas.
  """

  def __init__(self, log=logging.info, service_name='xmpp'):
    """Initializer.

    Args:
      log: A logger, used for dependency injection.
      service_name: Service name expected for all calls.
    """
    super(XmppServiceStub, self).__init__(service_name)
    self.log = log

  def _Dynamic_GetPresence(self, request, response):
    """Implementation of XmppService::GetPresence.

    Returns online if the first character of the JID comes before 'm' in the
    alphabet, otherwise returns offline.

    Args:
      request: A PresenceRequest.
      response: A PresenceResponse.
    """
    jid = request.jid()
    # Validate the from JID (raises xmpp.InvalidJidError on a bad value);
    # the normalized result itself is not needed here.
    self._GetFrom(request.from_jid())
    response.set_is_available(jid[0] < 'm')

  def _Dynamic_SendMessage(self, request, response):
    """Implementation of XmppService::SendMessage.

    Logs the message instead of delivering it, then reports NO_ERROR for
    every recipient.

    Args:
      request: An XmppMessageRequest.
      response: An XmppMessageResponse.
    """
    from_jid = self._GetFrom(request.from_jid())
    self.log('Sending an XMPP Message:')
    self.log('    From:')
    self.log('       ' + from_jid)
    self.log('    Body:')
    self.log('       ' + request.body())
    self.log('    Type:')
    self.log('       ' + request.type())
    self.log('    Raw Xml:')
    self.log('       ' + str(request.raw_xml()))
    self.log('    To JIDs:')
    for jid in request.jid_list():
      self.log('       ' + jid)

    # One status entry per recipient, all successful.
    for _ in request.jid_list():
      response.add_status(xmpp_service_pb.XmppMessageResponse.NO_ERROR)

  def _Dynamic_SendInvite(self, request, response):
    """Implementation of XmppService::SendInvite.

    Logs the invitation instead of delivering it.

    Args:
      request: An XmppInviteRequest.
      response: An XmppInviteResponse.
    """
    from_jid = self._GetFrom(request.from_jid())
    self.log('Sending an XMPP Invite:')
    self.log('    From:')
    self.log('       ' + from_jid)
    self.log('    To: ' + request.jid())

  def _GetFrom(self, requested):
    """Validates that the from JID is valid.

    Args:
      requested: The requested from JID.

    Returns:
      string, The from JID; defaults to appid@appspot.com/bot when no JID
      was requested, and gains a '/bot' resource when none was given.

    Raises:
      xmpp.InvalidJidError: if the requested JID is invalid.
    """
    appid = os.environ.get('APPLICATION_ID', '')
    if not requested:
      # No explicit sender: default to the application's bot JID.
      return appid + '@appspot.com/bot'

    node, domain, resource = ('', '', '')
    at = requested.find('@')
    if at == -1:
      self.log('Invalid From JID: No \'@\' character found. JID: %s', requested)
      raise xmpp.InvalidJidError()

    node = requested[:at]
    rest = requested[at+1:]

    if rest.find('@') > -1:
      self.log('Invalid From JID: Second \'@\' character found. JID: %s',
               requested)
      raise xmpp.InvalidJidError()

    # Split an optional '/resource' suffix; default the resource to 'bot'.
    slash = rest.find('/')
    if slash == -1:
      domain = rest
      resource = 'bot'
    else:
      domain = rest[:slash]
      resource = rest[slash+1:]

    if resource.find('/') > -1:
      self.log('Invalid From JID: Second \'/\' character found. JID: %s',
               requested)
      raise xmpp.InvalidJidError()

    # Only the app's own appspot.com JID or its appspotchat.com domain may
    # be used as a sender.
    if domain == 'appspot.com' and node == appid:
      return node + '@' + domain + '/' + resource
    elif domain == appid + '.appspotchat.com':
      return node + '@' + domain + '/' + resource

    self.log('Invalid From JID: Must be appid@appspot.com[/resource] or '
             'node@appid.appspotchat.com[/resource]. JID: %s', requested)
    raise xmpp.InvalidJidError()
--- a/thirdparty/google_appengine/google/appengine/base/capabilities_pb.py	Mon Sep 07 20:26:39 2009 +0200
+++ b/thirdparty/google_appengine/google/appengine/base/capabilities_pb.py	Mon Sep 07 20:27:37 2009 +0200
@@ -143,22 +143,24 @@
       res+=prefix+">\n"
     return res
 
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
   kconfig = 1
   kdefault_config = 2
 
-  _TEXT = (
-   "ErrorCode",
-   "config",
-   "default_config",
-  )
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    1: "config",
+    2: "default_config",
+  }, 2)
 
-  _TYPES = (
-   ProtocolBuffer.Encoder.NUMERIC,
-   ProtocolBuffer.Encoder.STRING,
-
-   ProtocolBuffer.Encoder.STRING,
-
-  )
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    1: ProtocolBuffer.Encoder.STRING,
+    2: ProtocolBuffer.Encoder.STRING,
+  }, 2, ProtocolBuffer.Encoder.MAX_TYPE)
 
   _STYLE = """"""
   _STYLE_CONTENT_TYPE = """"""
@@ -409,6 +411,10 @@
     if self.has_error_message_: res+=prefix+("error_message: %s\n" % self.DebugFormatString(self.error_message_))
     return res
 
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
   kpackage = 1
   kcapability = 2
   kstatus = 3
@@ -417,34 +423,27 @@
   kadmin_message = 5
   kerror_message = 6
 
-  _TEXT = (
-   "ErrorCode",
-   "package",
-   "capability",
-   "status",
-   "internal_message",
-   "admin_message",
-   "error_message",
-   "scheduled_time",
-  )
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    1: "package",
+    2: "capability",
+    3: "status",
+    4: "internal_message",
+    5: "admin_message",
+    6: "error_message",
+    7: "scheduled_time",
+  }, 7)
 
-  _TYPES = (
-   ProtocolBuffer.Encoder.NUMERIC,
-   ProtocolBuffer.Encoder.STRING,
-
-   ProtocolBuffer.Encoder.STRING,
-
-   ProtocolBuffer.Encoder.NUMERIC,
-
-   ProtocolBuffer.Encoder.STRING,
-
-   ProtocolBuffer.Encoder.STRING,
-
-   ProtocolBuffer.Encoder.STRING,
-
-   ProtocolBuffer.Encoder.STRING,
-
-  )
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    1: ProtocolBuffer.Encoder.STRING,
+    2: ProtocolBuffer.Encoder.STRING,
+    3: ProtocolBuffer.Encoder.NUMERIC,
+    4: ProtocolBuffer.Encoder.STRING,
+    5: ProtocolBuffer.Encoder.STRING,
+    6: ProtocolBuffer.Encoder.STRING,
+    7: ProtocolBuffer.Encoder.STRING,
+  }, 7, ProtocolBuffer.Encoder.MAX_TYPE)
 
   _STYLE = """"""
   _STYLE_CONTENT_TYPE = """"""
--- a/thirdparty/google_appengine/google/appengine/cron/groctimespecification.py	Mon Sep 07 20:26:39 2009 +0200
+++ b/thirdparty/google_appengine/google/appengine/cron/groctimespecification.py	Mon Sep 07 20:27:37 2009 +0200
@@ -47,9 +47,12 @@
 
 try:
   from pytz import NonExistentTimeError
+  from pytz import AmbiguousTimeError
 except ImportError:
   class NonExistentTimeError(Exception):
     pass
+  class AmbiguousTimeError(Exception):
+    pass
 
 
 def GrocTimeSpecification(schedule):
@@ -119,7 +122,7 @@
   """
 
   def __init__(self, interval, period):
-    super(IntervalTimeSpecification, self).__init__(self)
+    super(IntervalTimeSpecification, self).__init__()
     self.interval = interval
     self.period = period
 
@@ -286,13 +289,15 @@
                                       microsecond=0)
         if self.timezone and pytz is not None:
           try:
+            out = self.timezone.localize(out, is_dst=None)
+          except AmbiguousTimeError:
             out = self.timezone.localize(out)
-          except (NonExistentTimeError, IndexError):
+          except NonExistentTimeError:
             for _ in range(24):
               out = out.replace(minute=1) + datetime.timedelta(minutes=60)
               try:
                 out = self.timezone.localize(out)
-              except (NonExistentTimeError, IndexError):
+              except NonExistentTimeError:
                 continue
               break
           out = out.astimezone(pytz.utc)
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/thirdparty/google_appengine/google/appengine/datastore/action_pb.py	Mon Sep 07 20:27:37 2009 +0200
@@ -0,0 +1,24 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+from google.net.proto import ProtocolBuffer
+
+__pychecker__ = """maxreturns=0 maxbranches=0 no-callinit
+                   unusednames=printElemNumber,debug_strs no-special"""
+
class Action(ProtocolBuffer.ProtocolMessage):
  """Placeholder generated message: defines no fields of its own and
  inherits all ProtocolMessage behaviour unchanged."""
  pass
--- a/thirdparty/google_appengine/google/appengine/datastore/datastore_index.py	Mon Sep 07 20:26:39 2009 +0200
+++ b/thirdparty/google_appengine/google/appengine/datastore/datastore_index.py	Mon Sep 07 20:27:37 2009 +0200
@@ -192,6 +192,74 @@
                         ))
 
 
+def Normalize(filters, orders):
+  """ Normalizes filter and order query components.
+
+  The resulting components have the same effect as the given components if used
+  in a query.
+
+  Returns:
+    (filters, orders) the reduced set of filters and orders
+  """
+
+  for f in filters:
+    if f.op() == datastore_pb.Query_Filter.IN and f.property_size() == 1:
+      f.set_op(datastore_pb.Query_Filter.EQUAL);
+
+  eq_properties = set([f.property(0).name() for f in filters if f.op() == datastore_pb.Query_Filter.EQUAL]);
+
+  remove_set = eq_properties.copy()
+  new_orders = []
+  for o in orders:
+    if o.property() not in remove_set:
+      remove_set.add(o.property())
+      new_orders.append(o)
+  orders = new_orders
+
+
+  if datastore_types._KEY_SPECIAL_PROPERTY in eq_properties:
+    orders = []
+
+  new_orders = []
+  for o in orders:
+    if o.property() == datastore_types._KEY_SPECIAL_PROPERTY:
+      new_orders.append(o)
+      break
+    new_orders.append(o)
+  orders = new_orders
+
+  return (filters, orders)
+
+
+def RemoveNativelySupportedComponents(filters, orders):
+  """ Removes query components that are natively supported by the datastore.
+
+  The resulting filters and orders should not be used in an actual query.
+
+  Returns:
+    (filters, orders) the reduced set of filters and orders
+  """
+  (filters, orders) = Normalize(filters, orders)
+
+  has_key_desc_order = False
+  if orders and orders[-1].property() == datastore_types._KEY_SPECIAL_PROPERTY:
+    if orders[-1].direction() == ASCENDING:
+      orders = orders[:-1]
+    else:
+      has_key_desc_order = True
+
+  if not has_key_desc_order:
+    for f in filters:
+      if (f.op() in INEQUALITY_OPERATORS and
+          f.property(0).name() != datastore_types._KEY_SPECIAL_PROPERTY):
+        break
+    else:
+      filters = [f for f in filters
+          if f.property(0).name() != datastore_types._KEY_SPECIAL_PROPERTY]
+
+  return (filters, orders)
+
+
 def CompositeIndexForQuery(query):
   """Return the composite index needed for a query.
 
@@ -213,12 +281,18 @@
     can be at most one of these.
 
   - After that come all the (property, direction) pairs for the Order
-    entries, in the order given in the query.  Exceptions: (a) if
-    there is a Filter entry with an inequality operator that matches
-    the first Order entry, the first order pair is omitted (or,
-    equivalently, in this case the inequality pair is omitted); (b) if
-    an Order entry corresponds to an equality filter, it is ignored
-    (since there will only ever be one value returned).
+    entries, in the order given in the query.  Exceptions:
+      (a) if there is a Filter entry with an inequality operator that matches
+          the first Order entry, the first order pair is omitted (or,
+          equivalently, in this case the inequality pair is omitted).
+      (b) if an Order entry corresponds to an equality filter, it is ignored
+          (since there will only ever be one value returned).
+      (c) if there is an equality filter on __key__ all orders are dropped
+          (since there will be at most one result returned).
+      (d) if there is an order on __key__ all further orders are dropped (since
+          keys are unique).
+      (e) orders on __key__ ASCENDING are dropped (since this is supported
+          natively by the datastore).
 
   - Finally, if there are Filter entries whose operator is EXISTS, and
     whose property names are not already listed, they are added, with
@@ -271,16 +345,18 @@
     nprops = len(filter.property_list())
     assert nprops == 1, 'Filter has %s properties, expected 1' % nprops
 
-  if ancestor and not kind and not filters and not orders:
+  if not kind:
     required = False
 
+  (filters, orders) = RemoveNativelySupportedComponents(filters, orders)
+
   eq_filters = [f for f in filters if f.op() in EQUALITY_OPERATORS]
   ineq_filters = [f for f in filters if f.op() in INEQUALITY_OPERATORS]
   exists_filters = [f for f in filters if f.op() in EXISTS_OPERATORS]
   assert (len(eq_filters) + len(ineq_filters) +
           len(exists_filters)) == len(filters), 'Not all filters used'
 
-  if (kind and eq_filters and not ineq_filters and not exists_filters and
+  if (kind and not ineq_filters and not exists_filters and
       not orders):
     names = set(f.property(0).name() for f in eq_filters)
     if not names.intersection(datastore_types._SPECIAL_PROPERTIES):
@@ -292,16 +368,6 @@
     for filter in ineq_filters:
       assert filter.property(0).name() == ineq_property
 
-  new_orders = []
-  for order in orders:
-    name = order.property()
-    for filter in eq_filters:
-      if filter.property(0).name() == name:
-        break
-    else:
-      new_orders.append(order)
-  orders = new_orders
-
   props = []
 
   for f in eq_filters:
--- a/thirdparty/google_appengine/google/appengine/datastore/datastore_pb.py	Mon Sep 07 20:26:39 2009 +0200
+++ b/thirdparty/google_appengine/google/appengine/datastore/datastore_pb.py	Mon Sep 07 20:27:37 2009 +0200
@@ -25,10 +25,12 @@
 from google.appengine.api.api_base_pb import Integer64Proto;
 from google.appengine.api.api_base_pb import StringProto;
 from google.appengine.api.api_base_pb import VoidProto;
+from google.appengine.datastore.action_pb import Action
 from google.appengine.datastore.entity_pb import CompositeIndex
 from google.appengine.datastore.entity_pb import EntityProto
 from google.appengine.datastore.entity_pb import Index
 from google.appengine.datastore.entity_pb import Property
+from google.appengine.datastore.entity_pb import Path
 from google.appengine.datastore.entity_pb import Reference
 class Transaction(ProtocolBuffer.ProtocolMessage):
   has_handle_ = 0
@@ -95,18 +97,21 @@
     if self.has_handle_: res+=prefix+("handle: %s\n" % self.DebugFormatFixed64(self.handle_))
     return res
 
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
   khandle = 1
 
-  _TEXT = (
-   "ErrorCode",
-   "handle",
-  )
-
-  _TYPES = (
-   ProtocolBuffer.Encoder.NUMERIC,
-   ProtocolBuffer.Encoder.DOUBLE,
-
-  )
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    1: "handle",
+  }, 1)
+
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    1: ProtocolBuffer.Encoder.DOUBLE,
+  }, 1, ProtocolBuffer.Encoder.MAX_TYPE)
 
   _STYLE = """"""
   _STYLE_CONTENT_TYPE = """"""
@@ -374,6 +379,8 @@
   search_query_ = ""
   has_hint_ = 0
   hint_ = 0
+  has_count_ = 0
+  count_ = 0
   has_offset_ = 0
   offset_ = 0
   has_limit_ = 0
@@ -384,6 +391,10 @@
   keys_only_ = 0
   has_transaction_ = 0
   transaction_ = None
+  has_distinct_ = 0
+  distinct_ = 0
+  has_compile_ = 0
+  compile_ = 0
 
   def __init__(self, contents=None):
     self.filter_ = []
@@ -494,6 +505,19 @@
 
   def has_hint(self): return self.has_hint_
 
+  def count(self): return self.count_
+
+  def set_count(self, x):
+    self.has_count_ = 1
+    self.count_ = x
+
+  def clear_count(self):
+    if self.has_count_:
+      self.has_count_ = 0
+      self.count_ = 0
+
+  def has_count(self): return self.has_count_
+
   def offset(self): return self.offset_
 
   def set_offset(self, x):
@@ -580,6 +604,32 @@
 
   def has_transaction(self): return self.has_transaction_
 
+  def distinct(self): return self.distinct_
+
+  def set_distinct(self, x):
+    self.has_distinct_ = 1
+    self.distinct_ = x
+
+  def clear_distinct(self):
+    if self.has_distinct_:
+      self.has_distinct_ = 0
+      self.distinct_ = 0
+
+  def has_distinct(self): return self.has_distinct_
+
+  def compile(self): return self.compile_
+
+  def set_compile(self, x):
+    self.has_compile_ = 1
+    self.compile_ = x
+
+  def clear_compile(self):
+    if self.has_compile_:
+      self.has_compile_ = 0
+      self.compile_ = 0
+
+  def has_compile(self): return self.has_compile_
+
 
   def MergeFrom(self, x):
     assert x is not self
@@ -590,12 +640,15 @@
     if (x.has_search_query()): self.set_search_query(x.search_query())
     for i in xrange(x.order_size()): self.add_order().CopyFrom(x.order(i))
     if (x.has_hint()): self.set_hint(x.hint())
+    if (x.has_count()): self.set_count(x.count())
     if (x.has_offset()): self.set_offset(x.offset())
     if (x.has_limit()): self.set_limit(x.limit())
     for i in xrange(x.composite_index_size()): self.add_composite_index().CopyFrom(x.composite_index(i))
     if (x.has_require_perfect_plan()): self.set_require_perfect_plan(x.require_perfect_plan())
     if (x.has_keys_only()): self.set_keys_only(x.keys_only())
     if (x.has_transaction()): self.mutable_transaction().MergeFrom(x.transaction())
+    if (x.has_distinct()): self.set_distinct(x.distinct())
+    if (x.has_compile()): self.set_compile(x.compile())
 
   def Equals(self, x):
     if x is self: return 1
@@ -615,6 +668,8 @@
       if e1 != e2: return 0
     if self.has_hint_ != x.has_hint_: return 0
     if self.has_hint_ and self.hint_ != x.hint_: return 0
+    if self.has_count_ != x.has_count_: return 0
+    if self.has_count_ and self.count_ != x.count_: return 0
     if self.has_offset_ != x.has_offset_: return 0
     if self.has_offset_ and self.offset_ != x.offset_: return 0
     if self.has_limit_ != x.has_limit_: return 0
@@ -628,6 +683,10 @@
     if self.has_keys_only_ and self.keys_only_ != x.keys_only_: return 0
     if self.has_transaction_ != x.has_transaction_: return 0
     if self.has_transaction_ and self.transaction_ != x.transaction_: return 0
+    if self.has_distinct_ != x.has_distinct_: return 0
+    if self.has_distinct_ and self.distinct_ != x.distinct_: return 0
+    if self.has_compile_ != x.has_compile_: return 0
+    if self.has_compile_ and self.compile_ != x.compile_: return 0
     return 1
 
   def IsInitialized(self, debug_strs=None):
@@ -657,6 +716,7 @@
     n += 2 * len(self.order_)
     for i in xrange(len(self.order_)): n += self.order_[i].ByteSize()
     if (self.has_hint_): n += 2 + self.lengthVarInt64(self.hint_)
+    if (self.has_count_): n += 2 + self.lengthVarInt64(self.count_)
     if (self.has_offset_): n += 1 + self.lengthVarInt64(self.offset_)
     if (self.has_limit_): n += 2 + self.lengthVarInt64(self.limit_)
     n += 2 * len(self.composite_index_)
@@ -664,6 +724,8 @@
     if (self.has_require_perfect_plan_): n += 3
     if (self.has_keys_only_): n += 3
     if (self.has_transaction_): n += 2 + self.lengthString(self.transaction_.ByteSize())
+    if (self.has_distinct_): n += 3
+    if (self.has_compile_): n += 3
     return n + 1
 
   def Clear(self):
@@ -674,12 +736,15 @@
     self.clear_search_query()
     self.clear_order()
     self.clear_hint()
+    self.clear_count()
     self.clear_offset()
     self.clear_limit()
     self.clear_composite_index()
     self.clear_require_perfect_plan()
     self.clear_keys_only()
     self.clear_transaction()
+    self.clear_distinct()
+    self.clear_compile()
 
   def OutputUnchecked(self, out):
     out.putVarInt32(10)
@@ -725,6 +790,15 @@
       out.putVarInt32(178)
       out.putVarInt32(self.transaction_.ByteSize())
       self.transaction_.OutputUnchecked(out)
+    if (self.has_count_):
+      out.putVarInt32(184)
+      out.putVarInt32(self.count_)
+    if (self.has_distinct_):
+      out.putVarInt32(192)
+      out.putBoolean(self.distinct_)
+    if (self.has_compile_):
+      out.putVarInt32(200)
+      out.putBoolean(self.compile_)
 
   def TryMerge(self, d):
     while d.avail() > 0:
@@ -777,6 +851,15 @@
         d.skip(length)
         self.mutable_transaction().TryMerge(tmp)
         continue
+      if tt == 184:
+        self.set_count(d.getVarInt32())
+        continue
+      if tt == 192:
+        self.set_distinct(d.getBoolean())
+        continue
+      if tt == 200:
+        self.set_compile(d.getBoolean())
+        continue
       if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
       d.skipData(tt)
 
@@ -807,6 +890,7 @@
       res+=prefix+"}\n"
       cnt+=1
     if self.has_hint_: res+=prefix+("hint: %s\n" % self.DebugFormatInt32(self.hint_))
+    if self.has_count_: res+=prefix+("count: %s\n" % self.DebugFormatInt32(self.count_))
     if self.has_offset_: res+=prefix+("offset: %s\n" % self.DebugFormatInt32(self.offset_))
     if self.has_limit_: res+=prefix+("limit: %s\n" % self.DebugFormatInt32(self.limit_))
     cnt=0
@@ -823,8 +907,14 @@
       res+=prefix+"transaction <\n"
       res+=self.transaction_.__str__(prefix + "  ", printElemNumber)
       res+=prefix+">\n"
+    if self.has_distinct_: res+=prefix+("distinct: %s\n" % self.DebugFormatBool(self.distinct_))
+    if self.has_compile_: res+=prefix+("compile: %s\n" % self.DebugFormatBool(self.compile_))
     return res
 
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
   kapp = 1
   kkind = 3
   kancestor = 17
@@ -836,86 +926,981 @@
   kOrderproperty = 10
   kOrderdirection = 11
   khint = 18
+  kcount = 23
   koffset = 12
   klimit = 16
   kcomposite_index = 19
   krequire_perfect_plan = 20
   kkeys_only = 21
   ktransaction = 22
-
-  _TEXT = (
-   "ErrorCode",
-   "app",
-   None,
-   "kind",
-   "Filter",
-   None,
-   "op",
-   None,
-   "search_query",
-   "Order",
-   "property",
-   "direction",
-   "offset",
-   None,
-   "property",
-   None,
-   "limit",
-   "ancestor",
-   "hint",
-   "composite_index",
-   "require_perfect_plan",
-   "keys_only",
-   "transaction",
-  )
-
-  _TYPES = (
-   ProtocolBuffer.Encoder.NUMERIC,
-   ProtocolBuffer.Encoder.STRING,
-
-   ProtocolBuffer.Encoder.MAX_TYPE,
-
-   ProtocolBuffer.Encoder.STRING,
-
-   ProtocolBuffer.Encoder.STARTGROUP,
-
-   ProtocolBuffer.Encoder.MAX_TYPE,
-
-   ProtocolBuffer.Encoder.NUMERIC,
-
-   ProtocolBuffer.Encoder.MAX_TYPE,
-
-   ProtocolBuffer.Encoder.STRING,
-
-   ProtocolBuffer.Encoder.STARTGROUP,
-
-   ProtocolBuffer.Encoder.STRING,
-
-   ProtocolBuffer.Encoder.NUMERIC,
-
-   ProtocolBuffer.Encoder.NUMERIC,
-
-   ProtocolBuffer.Encoder.MAX_TYPE,
-
-   ProtocolBuffer.Encoder.STRING,
-
-   ProtocolBuffer.Encoder.MAX_TYPE,
-
-   ProtocolBuffer.Encoder.NUMERIC,
-
-   ProtocolBuffer.Encoder.STRING,
-
-   ProtocolBuffer.Encoder.NUMERIC,
-
-   ProtocolBuffer.Encoder.STRING,
-
-   ProtocolBuffer.Encoder.NUMERIC,
-
-   ProtocolBuffer.Encoder.NUMERIC,
-
-   ProtocolBuffer.Encoder.STRING,
-
-  )
+  kdistinct = 24
+  kcompile = 25
+
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    1: "app",
+    3: "kind",
+    4: "Filter",
+    6: "op",
+    8: "search_query",
+    9: "Order",
+    10: "property",
+    11: "direction",
+    12: "offset",
+    14: "property",
+    16: "limit",
+    17: "ancestor",
+    18: "hint",
+    19: "composite_index",
+    20: "require_perfect_plan",
+    21: "keys_only",
+    22: "transaction",
+    23: "count",
+    24: "distinct",
+    25: "compile",
+  }, 25)
+
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    1: ProtocolBuffer.Encoder.STRING,
+    3: ProtocolBuffer.Encoder.STRING,
+    4: ProtocolBuffer.Encoder.STARTGROUP,
+    6: ProtocolBuffer.Encoder.NUMERIC,
+    8: ProtocolBuffer.Encoder.STRING,
+    9: ProtocolBuffer.Encoder.STARTGROUP,
+    10: ProtocolBuffer.Encoder.STRING,
+    11: ProtocolBuffer.Encoder.NUMERIC,
+    12: ProtocolBuffer.Encoder.NUMERIC,
+    14: ProtocolBuffer.Encoder.STRING,
+    16: ProtocolBuffer.Encoder.NUMERIC,
+    17: ProtocolBuffer.Encoder.STRING,
+    18: ProtocolBuffer.Encoder.NUMERIC,
+    19: ProtocolBuffer.Encoder.STRING,
+    20: ProtocolBuffer.Encoder.NUMERIC,
+    21: ProtocolBuffer.Encoder.NUMERIC,
+    22: ProtocolBuffer.Encoder.STRING,
+    23: ProtocolBuffer.Encoder.NUMERIC,
+    24: ProtocolBuffer.Encoder.NUMERIC,
+    25: ProtocolBuffer.Encoder.NUMERIC,
+  }, 25, ProtocolBuffer.Encoder.MAX_TYPE)
+
+  _STYLE = """"""
+  _STYLE_CONTENT_TYPE = """"""
+class CompiledQuery_PrimaryScan(ProtocolBuffer.ProtocolMessage):
+  has_index_name_ = 0
+  index_name_ = ""
+  has_start_key_ = 0
+  start_key_ = ""
+  has_start_inclusive_ = 0
+  start_inclusive_ = 0
+  has_end_key_ = 0
+  end_key_ = ""
+  has_end_inclusive_ = 0
+  end_inclusive_ = 0
+
+  def __init__(self, contents=None):
+    if contents is not None: self.MergeFromString(contents)
+
+  def index_name(self): return self.index_name_
+
+  def set_index_name(self, x):
+    self.has_index_name_ = 1
+    self.index_name_ = x
+
+  def clear_index_name(self):
+    if self.has_index_name_:
+      self.has_index_name_ = 0
+      self.index_name_ = ""
+
+  def has_index_name(self): return self.has_index_name_
+
+  def start_key(self): return self.start_key_
+
+  def set_start_key(self, x):
+    self.has_start_key_ = 1
+    self.start_key_ = x
+
+  def clear_start_key(self):
+    if self.has_start_key_:
+      self.has_start_key_ = 0
+      self.start_key_ = ""
+
+  def has_start_key(self): return self.has_start_key_
+
+  def start_inclusive(self): return self.start_inclusive_
+
+  def set_start_inclusive(self, x):
+    self.has_start_inclusive_ = 1
+    self.start_inclusive_ = x
+
+  def clear_start_inclusive(self):
+    if self.has_start_inclusive_:
+      self.has_start_inclusive_ = 0
+      self.start_inclusive_ = 0
+
+  def has_start_inclusive(self): return self.has_start_inclusive_
+
+  def end_key(self): return self.end_key_
+
+  def set_end_key(self, x):
+    self.has_end_key_ = 1
+    self.end_key_ = x
+
+  def clear_end_key(self):
+    if self.has_end_key_:
+      self.has_end_key_ = 0
+      self.end_key_ = ""
+
+  def has_end_key(self): return self.has_end_key_
+
+  def end_inclusive(self): return self.end_inclusive_
+
+  def set_end_inclusive(self, x):
+    self.has_end_inclusive_ = 1
+    self.end_inclusive_ = x
+
+  def clear_end_inclusive(self):
+    if self.has_end_inclusive_:
+      self.has_end_inclusive_ = 0
+      self.end_inclusive_ = 0
+
+  def has_end_inclusive(self): return self.has_end_inclusive_
+
+
+  def MergeFrom(self, x):
+    assert x is not self
+    if (x.has_index_name()): self.set_index_name(x.index_name())
+    if (x.has_start_key()): self.set_start_key(x.start_key())
+    if (x.has_start_inclusive()): self.set_start_inclusive(x.start_inclusive())
+    if (x.has_end_key()): self.set_end_key(x.end_key())
+    if (x.has_end_inclusive()): self.set_end_inclusive(x.end_inclusive())
+
+  def Equals(self, x):
+    if x is self: return 1
+    if self.has_index_name_ != x.has_index_name_: return 0
+    if self.has_index_name_ and self.index_name_ != x.index_name_: return 0
+    if self.has_start_key_ != x.has_start_key_: return 0
+    if self.has_start_key_ and self.start_key_ != x.start_key_: return 0
+    if self.has_start_inclusive_ != x.has_start_inclusive_: return 0
+    if self.has_start_inclusive_ and self.start_inclusive_ != x.start_inclusive_: return 0
+    if self.has_end_key_ != x.has_end_key_: return 0
+    if self.has_end_key_ and self.end_key_ != x.end_key_: return 0
+    if self.has_end_inclusive_ != x.has_end_inclusive_: return 0
+    if self.has_end_inclusive_ and self.end_inclusive_ != x.end_inclusive_: return 0
+    return 1
+
+  def IsInitialized(self, debug_strs=None):
+    initialized = 1
+    return initialized
+
+  def ByteSize(self):
+    n = 0
+    if (self.has_index_name_): n += 1 + self.lengthString(len(self.index_name_))
+    if (self.has_start_key_): n += 1 + self.lengthString(len(self.start_key_))
+    if (self.has_start_inclusive_): n += 2
+    if (self.has_end_key_): n += 1 + self.lengthString(len(self.end_key_))
+    if (self.has_end_inclusive_): n += 2
+    return n + 0
+
+  def Clear(self):
+    self.clear_index_name()
+    self.clear_start_key()
+    self.clear_start_inclusive()
+    self.clear_end_key()
+    self.clear_end_inclusive()
+
+  def OutputUnchecked(self, out):
+    if (self.has_index_name_):
+      out.putVarInt32(18)
+      out.putPrefixedString(self.index_name_)
+    if (self.has_start_key_):
+      out.putVarInt32(26)
+      out.putPrefixedString(self.start_key_)
+    if (self.has_start_inclusive_):
+      out.putVarInt32(32)
+      out.putBoolean(self.start_inclusive_)
+    if (self.has_end_key_):
+      out.putVarInt32(42)
+      out.putPrefixedString(self.end_key_)
+    if (self.has_end_inclusive_):
+      out.putVarInt32(48)
+      out.putBoolean(self.end_inclusive_)
+
+  def TryMerge(self, d):
+    while 1:
+      tt = d.getVarInt32()
+      if tt == 12: break
+      if tt == 18:
+        self.set_index_name(d.getPrefixedString())
+        continue
+      if tt == 26:
+        self.set_start_key(d.getPrefixedString())
+        continue
+      if tt == 32:
+        self.set_start_inclusive(d.getBoolean())
+        continue
+      if tt == 42:
+        self.set_end_key(d.getPrefixedString())
+        continue
+      if tt == 48:
+        self.set_end_inclusive(d.getBoolean())
+        continue
+      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+      d.skipData(tt)
+
+
+  def __str__(self, prefix="", printElemNumber=0):
+    res=""
+    if self.has_index_name_: res+=prefix+("index_name: %s\n" % self.DebugFormatString(self.index_name_))
+    if self.has_start_key_: res+=prefix+("start_key: %s\n" % self.DebugFormatString(self.start_key_))
+    if self.has_start_inclusive_: res+=prefix+("start_inclusive: %s\n" % self.DebugFormatBool(self.start_inclusive_))
+    if self.has_end_key_: res+=prefix+("end_key: %s\n" % self.DebugFormatString(self.end_key_))
+    if self.has_end_inclusive_: res+=prefix+("end_inclusive: %s\n" % self.DebugFormatBool(self.end_inclusive_))
+    return res
+
+class CompiledQuery_MergeJoinScan(ProtocolBuffer.ProtocolMessage):
+  has_index_name_ = 0
+  index_name_ = ""
+
+  def __init__(self, contents=None):
+    self.prefix_value_ = []
+    if contents is not None: self.MergeFromString(contents)
+
+  def index_name(self): return self.index_name_
+
+  def set_index_name(self, x):
+    self.has_index_name_ = 1
+    self.index_name_ = x
+
+  def clear_index_name(self):
+    if self.has_index_name_:
+      self.has_index_name_ = 0
+      self.index_name_ = ""
+
+  def has_index_name(self): return self.has_index_name_
+
+  def prefix_value_size(self): return len(self.prefix_value_)
+  def prefix_value_list(self): return self.prefix_value_
+
+  def prefix_value(self, i):
+    return self.prefix_value_[i]
+
+  def set_prefix_value(self, i, x):
+    self.prefix_value_[i] = x
+
+  def add_prefix_value(self, x):
+    self.prefix_value_.append(x)
+
+  def clear_prefix_value(self):
+    self.prefix_value_ = []
+
+
+  def MergeFrom(self, x):
+    assert x is not self
+    if (x.has_index_name()): self.set_index_name(x.index_name())
+    for i in xrange(x.prefix_value_size()): self.add_prefix_value(x.prefix_value(i))
+
+  def Equals(self, x):
+    if x is self: return 1
+    if self.has_index_name_ != x.has_index_name_: return 0
+    if self.has_index_name_ and self.index_name_ != x.index_name_: return 0
+    if len(self.prefix_value_) != len(x.prefix_value_): return 0
+    for e1, e2 in zip(self.prefix_value_, x.prefix_value_):
+      if e1 != e2: return 0
+    return 1
+
+  def IsInitialized(self, debug_strs=None):
+    initialized = 1
+    if (not self.has_index_name_):
+      initialized = 0
+      if debug_strs is not None:
+        debug_strs.append('Required field: index_name not set.')
+    return initialized
+
+  def ByteSize(self):
+    n = 0
+    n += self.lengthString(len(self.index_name_))
+    n += 1 * len(self.prefix_value_)
+    for i in xrange(len(self.prefix_value_)): n += self.lengthString(len(self.prefix_value_[i]))
+    return n + 1
+
+  def Clear(self):
+    self.clear_index_name()
+    self.clear_prefix_value()
+
+  def OutputUnchecked(self, out):
+    out.putVarInt32(66)
+    out.putPrefixedString(self.index_name_)
+    for i in xrange(len(self.prefix_value_)):
+      out.putVarInt32(74)
+      out.putPrefixedString(self.prefix_value_[i])
+
+  def TryMerge(self, d):
+    while 1:
+      tt = d.getVarInt32()
+      if tt == 60: break
+      if tt == 66:
+        self.set_index_name(d.getPrefixedString())
+        continue
+      if tt == 74:
+        self.add_prefix_value(d.getPrefixedString())
+        continue
+      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+      d.skipData(tt)
+
+
+  def __str__(self, prefix="", printElemNumber=0):
+    res=""
+    if self.has_index_name_: res+=prefix+("index_name: %s\n" % self.DebugFormatString(self.index_name_))
+    cnt=0
+    for e in self.prefix_value_:
+      elm=""
+      if printElemNumber: elm="(%d)" % cnt
+      res+=prefix+("prefix_value%s: %s\n" % (elm, self.DebugFormatString(e)))
+      cnt+=1
+    return res
+
+class CompiledQuery_EntityFilter(ProtocolBuffer.ProtocolMessage):
+  has_distinct_ = 0
+  distinct_ = 0
+  has_offset_ = 0
+  offset_ = 0
+  has_limit_ = 0
+  limit_ = 0
+  has_kind_ = 0
+  kind_ = ""
+  has_ancestor_ = 0
+  ancestor_ = None
+
+  def __init__(self, contents=None):
+    self.lazy_init_lock_ = thread.allocate_lock()
+    if contents is not None: self.MergeFromString(contents)
+
+  def distinct(self): return self.distinct_
+
+  def set_distinct(self, x):
+    self.has_distinct_ = 1
+    self.distinct_ = x
+
+  def clear_distinct(self):
+    if self.has_distinct_:
+      self.has_distinct_ = 0
+      self.distinct_ = 0
+
+  def has_distinct(self): return self.has_distinct_
+
+  def offset(self): return self.offset_
+
+  def set_offset(self, x):
+    self.has_offset_ = 1
+    self.offset_ = x
+
+  def clear_offset(self):
+    if self.has_offset_:
+      self.has_offset_ = 0
+      self.offset_ = 0
+
+  def has_offset(self): return self.has_offset_
+
+  def limit(self): return self.limit_
+
+  def set_limit(self, x):
+    self.has_limit_ = 1
+    self.limit_ = x
+
+  def clear_limit(self):
+    if self.has_limit_:
+      self.has_limit_ = 0
+      self.limit_ = 0
+
+  def has_limit(self): return self.has_limit_
+
+  def kind(self): return self.kind_
+
+  def set_kind(self, x):
+    self.has_kind_ = 1
+    self.kind_ = x
+
+  def clear_kind(self):
+    if self.has_kind_:
+      self.has_kind_ = 0
+      self.kind_ = ""
+
+  def has_kind(self): return self.has_kind_
+
+  def ancestor(self):
+    if self.ancestor_ is None:
+      self.lazy_init_lock_.acquire()
+      try:
+        if self.ancestor_ is None: self.ancestor_ = Reference()
+      finally:
+        self.lazy_init_lock_.release()
+    return self.ancestor_
+
+  def mutable_ancestor(self): self.has_ancestor_ = 1; return self.ancestor()
+
+  def clear_ancestor(self):
+    if self.has_ancestor_:
+      self.has_ancestor_ = 0;
+      if self.ancestor_ is not None: self.ancestor_.Clear()
+
+  def has_ancestor(self): return self.has_ancestor_
+
+
+  def MergeFrom(self, x):
+    assert x is not self
+    if (x.has_distinct()): self.set_distinct(x.distinct())
+    if (x.has_offset()): self.set_offset(x.offset())
+    if (x.has_limit()): self.set_limit(x.limit())
+    if (x.has_kind()): self.set_kind(x.kind())
+    if (x.has_ancestor()): self.mutable_ancestor().MergeFrom(x.ancestor())
+
+  def Equals(self, x):
+    if x is self: return 1
+    if self.has_distinct_ != x.has_distinct_: return 0
+    if self.has_distinct_ and self.distinct_ != x.distinct_: return 0
+    if self.has_offset_ != x.has_offset_: return 0
+    if self.has_offset_ and self.offset_ != x.offset_: return 0
+    if self.has_limit_ != x.has_limit_: return 0
+    if self.has_limit_ and self.limit_ != x.limit_: return 0
+    if self.has_kind_ != x.has_kind_: return 0
+    if self.has_kind_ and self.kind_ != x.kind_: return 0
+    if self.has_ancestor_ != x.has_ancestor_: return 0
+    if self.has_ancestor_ and self.ancestor_ != x.ancestor_: return 0
+    return 1
+
+  def IsInitialized(self, debug_strs=None):
+    initialized = 1
+    if (self.has_ancestor_ and not self.ancestor_.IsInitialized(debug_strs)): initialized = 0
+    return initialized
+
+  def ByteSize(self):
+    n = 0
+    if (self.has_distinct_): n += 2
+    if (self.has_offset_): n += 1 + self.lengthVarInt64(self.offset_)
+    if (self.has_limit_): n += 2 + self.lengthVarInt64(self.limit_)
+    if (self.has_kind_): n += 2 + self.lengthString(len(self.kind_))
+    if (self.has_ancestor_): n += 2 + self.lengthString(self.ancestor_.ByteSize())
+    return n + 0
+
+  def Clear(self):
+    self.clear_distinct()
+    self.clear_offset()
+    self.clear_limit()
+    self.clear_kind()
+    self.clear_ancestor()
+
+  def OutputUnchecked(self, out):
+    if (self.has_distinct_):
+      out.putVarInt32(112)
+      out.putBoolean(self.distinct_)
+    if (self.has_offset_):
+      out.putVarInt32(120)
+      out.putVarInt32(self.offset_)
+    if (self.has_limit_):
+      out.putVarInt32(128)
+      out.putVarInt32(self.limit_)
+    if (self.has_kind_):
+      out.putVarInt32(138)
+      out.putPrefixedString(self.kind_)
+    if (self.has_ancestor_):
+      out.putVarInt32(146)
+      out.putVarInt32(self.ancestor_.ByteSize())
+      self.ancestor_.OutputUnchecked(out)
+
+  def TryMerge(self, d):
+    while 1:
+      tt = d.getVarInt32()
+      if tt == 108: break
+      if tt == 112:
+        self.set_distinct(d.getBoolean())
+        continue
+      if tt == 120:
+        self.set_offset(d.getVarInt32())
+        continue
+      if tt == 128:
+        self.set_limit(d.getVarInt32())
+        continue
+      if tt == 138:
+        self.set_kind(d.getPrefixedString())
+        continue
+      if tt == 146:
+        length = d.getVarInt32()
+        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
+        d.skip(length)
+        self.mutable_ancestor().TryMerge(tmp)
+        continue
+      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+      d.skipData(tt)
+
+
+  def __str__(self, prefix="", printElemNumber=0):
+    res=""
+    if self.has_distinct_: res+=prefix+("distinct: %s\n" % self.DebugFormatBool(self.distinct_))
+    if self.has_offset_: res+=prefix+("offset: %s\n" % self.DebugFormatInt32(self.offset_))
+    if self.has_limit_: res+=prefix+("limit: %s\n" % self.DebugFormatInt32(self.limit_))
+    if self.has_kind_: res+=prefix+("kind: %s\n" % self.DebugFormatString(self.kind_))
+    if self.has_ancestor_:
+      res+=prefix+"ancestor <\n"
+      res+=self.ancestor_.__str__(prefix + "  ", printElemNumber)
+      res+=prefix+">\n"
+    return res
+
+class CompiledQuery(ProtocolBuffer.ProtocolMessage):
+  has_primaryscan_ = 0
+  has_offset_ = 0
+  offset_ = 0
+  has_limit_ = 0
+  limit_ = 0
+  has_keys_only_ = 0
+  keys_only_ = 0
+  has_entityfilter_ = 0
+  entityfilter_ = None
+
+  def __init__(self, contents=None):
+    self.primaryscan_ = CompiledQuery_PrimaryScan()
+    self.mergejoinscan_ = []
+    self.lazy_init_lock_ = thread.allocate_lock()
+    if contents is not None: self.MergeFromString(contents)
+
+  def primaryscan(self): return self.primaryscan_
+
+  def mutable_primaryscan(self): self.has_primaryscan_ = 1; return self.primaryscan_
+
+  def clear_primaryscan(self):self.has_primaryscan_ = 0; self.primaryscan_.Clear()
+
+  def has_primaryscan(self): return self.has_primaryscan_
+
+  def mergejoinscan_size(self): return len(self.mergejoinscan_)
+  def mergejoinscan_list(self): return self.mergejoinscan_
+
+  def mergejoinscan(self, i):
+    return self.mergejoinscan_[i]
+
+  def mutable_mergejoinscan(self, i):
+    return self.mergejoinscan_[i]
+
+  def add_mergejoinscan(self):
+    x = CompiledQuery_MergeJoinScan()
+    self.mergejoinscan_.append(x)
+    return x
+
+  def clear_mergejoinscan(self):
+    self.mergejoinscan_ = []
+  def offset(self): return self.offset_
+
+  def set_offset(self, x):
+    self.has_offset_ = 1
+    self.offset_ = x
+
+  def clear_offset(self):
+    if self.has_offset_:
+      self.has_offset_ = 0
+      self.offset_ = 0
+
+  def has_offset(self): return self.has_offset_
+
+  def limit(self): return self.limit_
+
+  def set_limit(self, x):
+    self.has_limit_ = 1
+    self.limit_ = x
+
+  def clear_limit(self):
+    if self.has_limit_:
+      self.has_limit_ = 0
+      self.limit_ = 0
+
+  def has_limit(self): return self.has_limit_
+
+  def keys_only(self): return self.keys_only_
+
+  def set_keys_only(self, x):
+    self.has_keys_only_ = 1
+    self.keys_only_ = x
+
+  def clear_keys_only(self):
+    if self.has_keys_only_:
+      self.has_keys_only_ = 0
+      self.keys_only_ = 0
+
+  def has_keys_only(self): return self.has_keys_only_
+
+  def entityfilter(self):
+    if self.entityfilter_ is None:
+      self.lazy_init_lock_.acquire()
+      try:
+        if self.entityfilter_ is None: self.entityfilter_ = CompiledQuery_EntityFilter()
+      finally:
+        self.lazy_init_lock_.release()
+    return self.entityfilter_
+
+  def mutable_entityfilter(self): self.has_entityfilter_ = 1; return self.entityfilter()
+
+  def clear_entityfilter(self):
+    if self.has_entityfilter_:
+      self.has_entityfilter_ = 0;
+      if self.entityfilter_ is not None: self.entityfilter_.Clear()
+
+  def has_entityfilter(self): return self.has_entityfilter_
+
+
+  def MergeFrom(self, x):
+    assert x is not self
+    if (x.has_primaryscan()): self.mutable_primaryscan().MergeFrom(x.primaryscan())
+    for i in xrange(x.mergejoinscan_size()): self.add_mergejoinscan().CopyFrom(x.mergejoinscan(i))
+    if (x.has_offset()): self.set_offset(x.offset())
+    if (x.has_limit()): self.set_limit(x.limit())
+    if (x.has_keys_only()): self.set_keys_only(x.keys_only())
+    if (x.has_entityfilter()): self.mutable_entityfilter().MergeFrom(x.entityfilter())
+
+  def Equals(self, x):
+    if x is self: return 1
+    if self.has_primaryscan_ != x.has_primaryscan_: return 0
+    if self.has_primaryscan_ and self.primaryscan_ != x.primaryscan_: return 0
+    if len(self.mergejoinscan_) != len(x.mergejoinscan_): return 0
+    for e1, e2 in zip(self.mergejoinscan_, x.mergejoinscan_):
+      if e1 != e2: return 0
+    if self.has_offset_ != x.has_offset_: return 0
+    if self.has_offset_ and self.offset_ != x.offset_: return 0
+    if self.has_limit_ != x.has_limit_: return 0
+    if self.has_limit_ and self.limit_ != x.limit_: return 0
+    if self.has_keys_only_ != x.has_keys_only_: return 0
+    if self.has_keys_only_ and self.keys_only_ != x.keys_only_: return 0
+    if self.has_entityfilter_ != x.has_entityfilter_: return 0
+    if self.has_entityfilter_ and self.entityfilter_ != x.entityfilter_: return 0
+    return 1
+
+  def IsInitialized(self, debug_strs=None):
+    initialized = 1
+    if (not self.has_primaryscan_):
+      initialized = 0
+      if debug_strs is not None:
+        debug_strs.append('Required field: primaryscan not set.')
+    elif not self.primaryscan_.IsInitialized(debug_strs): initialized = 0
+    for p in self.mergejoinscan_:
+      if not p.IsInitialized(debug_strs): initialized=0
+    if (not self.has_keys_only_):
+      initialized = 0
+      if debug_strs is not None:
+        debug_strs.append('Required field: keys_only not set.')
+    if (self.has_entityfilter_ and not self.entityfilter_.IsInitialized(debug_strs)): initialized = 0
+    return initialized
+
+  def ByteSize(self):
+    n = 0
+    n += self.primaryscan_.ByteSize()
+    n += 2 * len(self.mergejoinscan_)
+    for i in xrange(len(self.mergejoinscan_)): n += self.mergejoinscan_[i].ByteSize()
+    if (self.has_offset_): n += 1 + self.lengthVarInt64(self.offset_)
+    if (self.has_limit_): n += 1 + self.lengthVarInt64(self.limit_)
+    if (self.has_entityfilter_): n += 2 + self.entityfilter_.ByteSize()
+    return n + 4
+
+  def Clear(self):
+    self.clear_primaryscan()
+    self.clear_mergejoinscan()
+    self.clear_offset()
+    self.clear_limit()
+    self.clear_keys_only()
+    self.clear_entityfilter()
+
+  def OutputUnchecked(self, out):
+    out.putVarInt32(11)
+    self.primaryscan_.OutputUnchecked(out)
+    out.putVarInt32(12)
+    for i in xrange(len(self.mergejoinscan_)):
+      out.putVarInt32(59)
+      self.mergejoinscan_[i].OutputUnchecked(out)
+      out.putVarInt32(60)
+    if (self.has_offset_):
+      out.putVarInt32(80)
+      out.putVarInt32(self.offset_)
+    if (self.has_limit_):
+      out.putVarInt32(88)
+      out.putVarInt32(self.limit_)
+    out.putVarInt32(96)
+    out.putBoolean(self.keys_only_)
+    if (self.has_entityfilter_):
+      out.putVarInt32(107)
+      self.entityfilter_.OutputUnchecked(out)
+      out.putVarInt32(108)
+
+  def TryMerge(self, d):
+    while d.avail() > 0:
+      tt = d.getVarInt32()
+      if tt == 11:
+        self.mutable_primaryscan().TryMerge(d)
+        continue
+      if tt == 59:
+        self.add_mergejoinscan().TryMerge(d)
+        continue
+      if tt == 80:
+        self.set_offset(d.getVarInt32())
+        continue
+      if tt == 88:
+        self.set_limit(d.getVarInt32())
+        continue
+      if tt == 96:
+        self.set_keys_only(d.getBoolean())
+        continue
+      if tt == 107:
+        self.mutable_entityfilter().TryMerge(d)
+        continue
+      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+      d.skipData(tt)
+
+
+  def __str__(self, prefix="", printElemNumber=0):
+    res=""
+    if self.has_primaryscan_:
+      res+=prefix+"PrimaryScan {\n"
+      res+=self.primaryscan_.__str__(prefix + "  ", printElemNumber)
+      res+=prefix+"}\n"
+    cnt=0
+    for e in self.mergejoinscan_:
+      elm=""
+      if printElemNumber: elm="(%d)" % cnt
+      res+=prefix+("MergeJoinScan%s {\n" % elm)
+      res+=e.__str__(prefix + "  ", printElemNumber)
+      res+=prefix+"}\n"
+      cnt+=1
+    if self.has_offset_: res+=prefix+("offset: %s\n" % self.DebugFormatInt32(self.offset_))
+    if self.has_limit_: res+=prefix+("limit: %s\n" % self.DebugFormatInt32(self.limit_))
+    if self.has_keys_only_: res+=prefix+("keys_only: %s\n" % self.DebugFormatBool(self.keys_only_))
+    if self.has_entityfilter_:
+      res+=prefix+"EntityFilter {\n"
+      res+=self.entityfilter_.__str__(prefix + "  ", printElemNumber)
+      res+=prefix+"}\n"
+    return res
+
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+  kPrimaryScanGroup = 1
+  kPrimaryScanindex_name = 2
+  kPrimaryScanstart_key = 3
+  kPrimaryScanstart_inclusive = 4
+  kPrimaryScanend_key = 5
+  kPrimaryScanend_inclusive = 6
+  kMergeJoinScanGroup = 7
+  kMergeJoinScanindex_name = 8
+  kMergeJoinScanprefix_value = 9
+  koffset = 10
+  klimit = 11
+  kkeys_only = 12
+  kEntityFilterGroup = 13
+  kEntityFilterdistinct = 14
+  kEntityFilteroffset = 15
+  kEntityFilterlimit = 16
+  kEntityFilterkind = 17
+  kEntityFilterancestor = 18
+
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    1: "PrimaryScan",
+    2: "index_name",
+    3: "start_key",
+    4: "start_inclusive",
+    5: "end_key",
+    6: "end_inclusive",
+    7: "MergeJoinScan",
+    8: "index_name",
+    9: "prefix_value",
+    10: "offset",
+    11: "limit",
+    12: "keys_only",
+    13: "EntityFilter",
+    14: "distinct",
+    15: "offset",
+    16: "limit",
+    17: "kind",
+    18: "ancestor",
+  }, 18)
+
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    1: ProtocolBuffer.Encoder.STARTGROUP,
+    2: ProtocolBuffer.Encoder.STRING,
+    3: ProtocolBuffer.Encoder.STRING,
+    4: ProtocolBuffer.Encoder.NUMERIC,
+    5: ProtocolBuffer.Encoder.STRING,
+    6: ProtocolBuffer.Encoder.NUMERIC,
+    7: ProtocolBuffer.Encoder.STARTGROUP,
+    8: ProtocolBuffer.Encoder.STRING,
+    9: ProtocolBuffer.Encoder.STRING,
+    10: ProtocolBuffer.Encoder.NUMERIC,
+    11: ProtocolBuffer.Encoder.NUMERIC,
+    12: ProtocolBuffer.Encoder.NUMERIC,
+    13: ProtocolBuffer.Encoder.STARTGROUP,
+    14: ProtocolBuffer.Encoder.NUMERIC,
+    15: ProtocolBuffer.Encoder.NUMERIC,
+    16: ProtocolBuffer.Encoder.NUMERIC,
+    17: ProtocolBuffer.Encoder.STRING,
+    18: ProtocolBuffer.Encoder.STRING,
+  }, 18, ProtocolBuffer.Encoder.MAX_TYPE)
+
+  _STYLE = """"""
+  _STYLE_CONTENT_TYPE = """"""
+class RunCompiledQueryRequest(ProtocolBuffer.ProtocolMessage):
+  has_compiled_query_ = 0
+  has_original_query_ = 0
+  original_query_ = None
+  has_count_ = 0
+  count_ = 0
+
+  def __init__(self, contents=None):
+    self.compiled_query_ = CompiledQuery()
+    self.lazy_init_lock_ = thread.allocate_lock()
+    if contents is not None: self.MergeFromString(contents)
+
+  def compiled_query(self): return self.compiled_query_
+
+  def mutable_compiled_query(self): self.has_compiled_query_ = 1; return self.compiled_query_
+
+  def clear_compiled_query(self):self.has_compiled_query_ = 0; self.compiled_query_.Clear()
+
+  def has_compiled_query(self): return self.has_compiled_query_
+
+  def original_query(self):
+    if self.original_query_ is None:
+      self.lazy_init_lock_.acquire()
+      try:
+        if self.original_query_ is None: self.original_query_ = Query()
+      finally:
+        self.lazy_init_lock_.release()
+    return self.original_query_
+
+  def mutable_original_query(self): self.has_original_query_ = 1; return self.original_query()
+
+  def clear_original_query(self):
+    if self.has_original_query_:
+      self.has_original_query_ = 0;
+      if self.original_query_ is not None: self.original_query_.Clear()
+
+  def has_original_query(self): return self.has_original_query_
+
+  def count(self): return self.count_
+
+  def set_count(self, x):
+    self.has_count_ = 1
+    self.count_ = x
+
+  def clear_count(self):
+    if self.has_count_:
+      self.has_count_ = 0
+      self.count_ = 0
+
+  def has_count(self): return self.has_count_
+
+
+  def MergeFrom(self, x):
+    assert x is not self
+    if (x.has_compiled_query()): self.mutable_compiled_query().MergeFrom(x.compiled_query())
+    if (x.has_original_query()): self.mutable_original_query().MergeFrom(x.original_query())
+    if (x.has_count()): self.set_count(x.count())
+
+  def Equals(self, x):
+    if x is self: return 1
+    if self.has_compiled_query_ != x.has_compiled_query_: return 0
+    if self.has_compiled_query_ and self.compiled_query_ != x.compiled_query_: return 0
+    if self.has_original_query_ != x.has_original_query_: return 0
+    if self.has_original_query_ and self.original_query_ != x.original_query_: return 0
+    if self.has_count_ != x.has_count_: return 0
+    if self.has_count_ and self.count_ != x.count_: return 0
+    return 1
+
+  def IsInitialized(self, debug_strs=None):
+    initialized = 1
+    if (not self.has_compiled_query_):
+      initialized = 0
+      if debug_strs is not None:
+        debug_strs.append('Required field: compiled_query not set.')
+    elif not self.compiled_query_.IsInitialized(debug_strs): initialized = 0
+    if (self.has_original_query_ and not self.original_query_.IsInitialized(debug_strs)): initialized = 0
+    return initialized
+
+  def ByteSize(self):
+    n = 0
+    n += self.lengthString(self.compiled_query_.ByteSize())
+    if (self.has_original_query_): n += 1 + self.lengthString(self.original_query_.ByteSize())
+    if (self.has_count_): n += 1 + self.lengthVarInt64(self.count_)
+    return n + 1
+
+  def Clear(self):
+    self.clear_compiled_query()
+    self.clear_original_query()
+    self.clear_count()
+
+  def OutputUnchecked(self, out):
+    out.putVarInt32(10)
+    out.putVarInt32(self.compiled_query_.ByteSize())
+    self.compiled_query_.OutputUnchecked(out)
+    if (self.has_original_query_):
+      out.putVarInt32(18)
+      out.putVarInt32(self.original_query_.ByteSize())
+      self.original_query_.OutputUnchecked(out)
+    if (self.has_count_):
+      out.putVarInt32(24)
+      out.putVarInt32(self.count_)
+
+  def TryMerge(self, d):
+    while d.avail() > 0:
+      tt = d.getVarInt32()
+      if tt == 10:
+        length = d.getVarInt32()
+        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
+        d.skip(length)
+        self.mutable_compiled_query().TryMerge(tmp)
+        continue
+      if tt == 18:
+        length = d.getVarInt32()
+        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
+        d.skip(length)
+        self.mutable_original_query().TryMerge(tmp)
+        continue
+      if tt == 24:
+        self.set_count(d.getVarInt32())
+        continue
+      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+      d.skipData(tt)
+
+
+  def __str__(self, prefix="", printElemNumber=0):
+    res=""
+    if self.has_compiled_query_:
+      res+=prefix+"compiled_query <\n"
+      res+=self.compiled_query_.__str__(prefix + "  ", printElemNumber)
+      res+=prefix+">\n"
+    if self.has_original_query_:
+      res+=prefix+"original_query <\n"
+      res+=self.original_query_.__str__(prefix + "  ", printElemNumber)
+      res+=prefix+">\n"
+    if self.has_count_: res+=prefix+("count: %s\n" % self.DebugFormatInt32(self.count_))
+    return res
+
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+  kcompiled_query = 1
+  koriginal_query = 2
+  kcount = 3
+
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    1: "compiled_query",
+    2: "original_query",
+    3: "count",
+  }, 3)
+
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    1: ProtocolBuffer.Encoder.STRING,
+    2: ProtocolBuffer.Encoder.STRING,
+    3: ProtocolBuffer.Encoder.NUMERIC,
+  }, 3, ProtocolBuffer.Encoder.MAX_TYPE)
 
   _STYLE = """"""
   _STYLE_CONTENT_TYPE = """"""
@@ -1080,30 +2065,30 @@
     if self.has_native_limit_: res+=prefix+("native_limit: %s\n" % self.DebugFormatInt32(self.native_limit_))
     return res
 
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
   knative_ancestor = 1
   knative_index = 2
   knative_offset = 3
   knative_limit = 4
 
-  _TEXT = (
-   "ErrorCode",
-   "native_ancestor",
-   "native_index",
-   "native_offset",
-   "native_limit",
-  )
-
-  _TYPES = (
-   ProtocolBuffer.Encoder.NUMERIC,
-   ProtocolBuffer.Encoder.NUMERIC,
-
-   ProtocolBuffer.Encoder.STRING,
-
-   ProtocolBuffer.Encoder.NUMERIC,
-
-   ProtocolBuffer.Encoder.NUMERIC,
-
-  )
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    1: "native_ancestor",
+    2: "native_index",
+    3: "native_offset",
+    4: "native_limit",
+  }, 4)
+
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    1: ProtocolBuffer.Encoder.NUMERIC,
+    2: ProtocolBuffer.Encoder.STRING,
+    3: ProtocolBuffer.Encoder.NUMERIC,
+    4: ProtocolBuffer.Encoder.NUMERIC,
+  }, 4, ProtocolBuffer.Encoder.MAX_TYPE)
 
   _STYLE = """"""
   _STYLE_CONTENT_TYPE = """"""
@@ -1172,18 +2157,21 @@
     if self.has_cursor_: res+=prefix+("cursor: %s\n" % self.DebugFormatFixed64(self.cursor_))
     return res
 
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
   kcursor = 1
 
-  _TEXT = (
-   "ErrorCode",
-   "cursor",
-  )
-
-  _TYPES = (
-   ProtocolBuffer.Encoder.NUMERIC,
-   ProtocolBuffer.Encoder.DOUBLE,
-
-  )
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    1: "cursor",
+  }, 1)
+
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    1: ProtocolBuffer.Encoder.DOUBLE,
+  }, 1, ProtocolBuffer.Encoder.MAX_TYPE)
 
   _STYLE = """"""
   _STYLE_CONTENT_TYPE = """"""
@@ -1194,6 +2182,7 @@
   INTERNAL_ERROR =    3
   NEED_INDEX   =    4
   TIMEOUT      =    5
+  PERMISSION_DENIED =    6
 
   _ErrorCode_NAMES = {
     1: "BAD_REQUEST",
@@ -1201,6 +2190,7 @@
     3: "INTERNAL_ERROR",
     4: "NEED_INDEX",
     5: "TIMEOUT",
+    6: "PERMISSION_DENIED",
   }
 
   def ErrorCode_Name(cls, x): return cls._ErrorCode_NAMES.get(x, "")
@@ -1245,13 +2235,17 @@
     return res
 
 
-  _TEXT = (
-   "ErrorCode",
-  )
-
-  _TYPES = (
-   ProtocolBuffer.Encoder.NUMERIC,
-  )
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+  }, 0)
+
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+  }, 0, ProtocolBuffer.Encoder.MAX_TYPE)
 
   _STYLE = """"""
   _STYLE_CONTENT_TYPE = """"""
@@ -1399,30 +2393,30 @@
     if self.has_entity_write_bytes_: res+=prefix+("entity_write_bytes: %s\n" % self.DebugFormatInt32(self.entity_write_bytes_))
     return res
 
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
   kindex_writes = 1
   kindex_write_bytes = 2
   kentity_writes = 3
   kentity_write_bytes = 4
 
-  _TEXT = (
-   "ErrorCode",
-   "index_writes",
-   "index_write_bytes",
-   "entity_writes",
-   "entity_write_bytes",
-  )
-
-  _TYPES = (
-   ProtocolBuffer.Encoder.NUMERIC,
-   ProtocolBuffer.Encoder.NUMERIC,
-
-   ProtocolBuffer.Encoder.NUMERIC,
-
-   ProtocolBuffer.Encoder.NUMERIC,
-
-   ProtocolBuffer.Encoder.NUMERIC,
-
-  )
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    1: "index_writes",
+    2: "index_write_bytes",
+    3: "entity_writes",
+    4: "entity_write_bytes",
+  }, 4)
+
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    1: ProtocolBuffer.Encoder.NUMERIC,
+    2: ProtocolBuffer.Encoder.NUMERIC,
+    3: ProtocolBuffer.Encoder.NUMERIC,
+    4: ProtocolBuffer.Encoder.NUMERIC,
+  }, 4, ProtocolBuffer.Encoder.MAX_TYPE)
 
   _STYLE = """"""
   _STYLE_CONTENT_TYPE = """"""
@@ -1547,22 +2541,24 @@
       res+=prefix+">\n"
     return res
 
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
   kkey = 1
   ktransaction = 2
 
-  _TEXT = (
-   "ErrorCode",
-   "key",
-   "transaction",
-  )
-
-  _TYPES = (
-   ProtocolBuffer.Encoder.NUMERIC,
-   ProtocolBuffer.Encoder.STRING,
-
-   ProtocolBuffer.Encoder.STRING,
-
-  )
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    1: "key",
+    2: "transaction",
+  }, 2)
+
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    1: ProtocolBuffer.Encoder.STRING,
+    2: ProtocolBuffer.Encoder.STRING,
+  }, 2, ProtocolBuffer.Encoder.MAX_TYPE)
 
   _STYLE = """"""
   _STYLE_CONTENT_TYPE = """"""
@@ -1721,22 +2717,24 @@
       cnt+=1
     return res
 
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
   kEntityGroup = 1
   kEntityentity = 2
 
-  _TEXT = (
-   "ErrorCode",
-   "Entity",
-   "entity",
-  )
-
-  _TYPES = (
-   ProtocolBuffer.Encoder.NUMERIC,
-   ProtocolBuffer.Encoder.STARTGROUP,
-
-   ProtocolBuffer.Encoder.STRING,
-
-  )
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    1: "Entity",
+    2: "entity",
+  }, 2)
+
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    1: ProtocolBuffer.Encoder.STARTGROUP,
+    2: ProtocolBuffer.Encoder.STRING,
+  }, 2, ProtocolBuffer.Encoder.MAX_TYPE)
 
   _STYLE = """"""
   _STYLE_CONTENT_TYPE = """"""
@@ -1932,30 +2930,30 @@
     if self.has_trusted_: res+=prefix+("trusted: %s\n" % self.DebugFormatBool(self.trusted_))
     return res
 
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
   kentity = 1
   ktransaction = 2
   kcomposite_index = 3
   ktrusted = 4
 
-  _TEXT = (
-   "ErrorCode",
-   "entity",
-   "transaction",
-   "composite_index",
-   "trusted",
-  )
-
-  _TYPES = (
-   ProtocolBuffer.Encoder.NUMERIC,
-   ProtocolBuffer.Encoder.STRING,
-
-   ProtocolBuffer.Encoder.STRING,
-
-   ProtocolBuffer.Encoder.STRING,
-
-   ProtocolBuffer.Encoder.NUMERIC,
-
-  )
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    1: "entity",
+    2: "transaction",
+    3: "composite_index",
+    4: "trusted",
+  }, 4)
+
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    1: ProtocolBuffer.Encoder.STRING,
+    2: ProtocolBuffer.Encoder.STRING,
+    3: ProtocolBuffer.Encoder.STRING,
+    4: ProtocolBuffer.Encoder.NUMERIC,
+  }, 4, ProtocolBuffer.Encoder.MAX_TYPE)
 
   _STYLE = """"""
   _STYLE_CONTENT_TYPE = """"""
@@ -2080,22 +3078,24 @@
       res+=prefix+">\n"
     return res
 
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
   kkey = 1
   kcost = 2
 
-  _TEXT = (
-   "ErrorCode",
-   "key",
-   "cost",
-  )
-
-  _TYPES = (
-   ProtocolBuffer.Encoder.NUMERIC,
-   ProtocolBuffer.Encoder.STRING,
-
-   ProtocolBuffer.Encoder.STRING,
-
-  )
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    1: "key",
+    2: "cost",
+  }, 2)
+
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    1: ProtocolBuffer.Encoder.STRING,
+    2: ProtocolBuffer.Encoder.STRING,
+  }, 2, ProtocolBuffer.Encoder.MAX_TYPE)
 
   _STYLE = """"""
   _STYLE_CONTENT_TYPE = """"""
@@ -2247,35 +3247,27 @@
     if self.has_trusted_: res+=prefix+("trusted: %s\n" % self.DebugFormatBool(self.trusted_))
     return res
 
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
   kkey = 6
   ktransaction = 5
   ktrusted = 4
 
-  _TEXT = (
-   "ErrorCode",
-   None,
-   None,
-   None,
-   "trusted",
-   "transaction",
-   "key",
-  )
-
-  _TYPES = (
-   ProtocolBuffer.Encoder.NUMERIC,
-   ProtocolBuffer.Encoder.MAX_TYPE,
-
-   ProtocolBuffer.Encoder.MAX_TYPE,
-
-   ProtocolBuffer.Encoder.MAX_TYPE,
-
-   ProtocolBuffer.Encoder.NUMERIC,
-
-   ProtocolBuffer.Encoder.STRING,
-
-   ProtocolBuffer.Encoder.STRING,
-
-  )
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    4: "trusted",
+    5: "transaction",
+    6: "key",
+  }, 6)
+
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    4: ProtocolBuffer.Encoder.NUMERIC,
+    5: ProtocolBuffer.Encoder.STRING,
+    6: ProtocolBuffer.Encoder.STRING,
+  }, 6, ProtocolBuffer.Encoder.MAX_TYPE)
 
   _STYLE = """"""
   _STYLE_CONTENT_TYPE = """"""
@@ -2356,25 +3348,30 @@
       res+=prefix+">\n"
     return res
 
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
   kcost = 1
 
-  _TEXT = (
-   "ErrorCode",
-   "cost",
-  )
-
-  _TYPES = (
-   ProtocolBuffer.Encoder.NUMERIC,
-   ProtocolBuffer.Encoder.STRING,
-
-  )
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    1: "cost",
+  }, 1)
+
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    1: ProtocolBuffer.Encoder.STRING,
+  }, 1, ProtocolBuffer.Encoder.MAX_TYPE)
 
   _STYLE = """"""
   _STYLE_CONTENT_TYPE = """"""
 class NextRequest(ProtocolBuffer.ProtocolMessage):
   has_cursor_ = 0
   has_count_ = 0
-  count_ = 1
+  count_ = 0
+  has_compile_ = 0
+  compile_ = 0
 
   def __init__(self, contents=None):
     self.cursor_ = Cursor()
@@ -2397,15 +3394,29 @@
   def clear_count(self):
     if self.has_count_:
       self.has_count_ = 0
-      self.count_ = 1
+      self.count_ = 0
 
   def has_count(self): return self.has_count_
 
+  def compile(self): return self.compile_
+
+  def set_compile(self, x):
+    self.has_compile_ = 1
+    self.compile_ = x
+
+  def clear_compile(self):
+    if self.has_compile_:
+      self.has_compile_ = 0
+      self.compile_ = 0
+
+  def has_compile(self): return self.has_compile_
+
 
   def MergeFrom(self, x):
     assert x is not self
     if (x.has_cursor()): self.mutable_cursor().MergeFrom(x.cursor())
     if (x.has_count()): self.set_count(x.count())
+    if (x.has_compile()): self.set_compile(x.compile())
 
   def Equals(self, x):
     if x is self: return 1
@@ -2413,6 +3424,8 @@
     if self.has_cursor_ and self.cursor_ != x.cursor_: return 0
     if self.has_count_ != x.has_count_: return 0
     if self.has_count_ and self.count_ != x.count_: return 0
+    if self.has_compile_ != x.has_compile_: return 0
+    if self.has_compile_ and self.compile_ != x.compile_: return 0
     return 1
 
   def IsInitialized(self, debug_strs=None):
@@ -2428,11 +3441,13 @@
     n = 0
     n += self.lengthString(self.cursor_.ByteSize())
     if (self.has_count_): n += 1 + self.lengthVarInt64(self.count_)
+    if (self.has_compile_): n += 2
     return n + 1
 
   def Clear(self):
     self.clear_cursor()
     self.clear_count()
+    self.clear_compile()
 
   def OutputUnchecked(self, out):
     out.putVarInt32(10)
@@ -2441,6 +3456,9 @@
     if (self.has_count_):
       out.putVarInt32(16)
       out.putVarInt32(self.count_)
+    if (self.has_compile_):
+      out.putVarInt32(24)
+      out.putBoolean(self.compile_)
 
   def TryMerge(self, d):
     while d.avail() > 0:
@@ -2454,6 +3472,9 @@
       if tt == 16:
         self.set_count(d.getVarInt32())
         continue
+      if tt == 24:
+        self.set_compile(d.getBoolean())
+        continue
       if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
       d.skipData(tt)
 
@@ -2465,24 +3486,30 @@
       res+=self.cursor_.__str__(prefix + "  ", printElemNumber)
       res+=prefix+">\n"
     if self.has_count_: res+=prefix+("count: %s\n" % self.DebugFormatInt32(self.count_))
+    if self.has_compile_: res+=prefix+("compile: %s\n" % self.DebugFormatBool(self.compile_))
     return res
 
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
   kcursor = 1
   kcount = 2
-
-  _TEXT = (
-   "ErrorCode",
-   "cursor",
-   "count",
-  )
-
-  _TYPES = (
-   ProtocolBuffer.Encoder.NUMERIC,
-   ProtocolBuffer.Encoder.STRING,
-
-   ProtocolBuffer.Encoder.NUMERIC,
-
-  )
+  kcompile = 3
+
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    1: "cursor",
+    2: "count",
+    3: "compile",
+  }, 3)
+
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    1: ProtocolBuffer.Encoder.STRING,
+    2: ProtocolBuffer.Encoder.NUMERIC,
+    3: ProtocolBuffer.Encoder.NUMERIC,
+  }, 3, ProtocolBuffer.Encoder.MAX_TYPE)
 
   _STYLE = """"""
   _STYLE_CONTENT_TYPE = """"""
@@ -2493,6 +3520,8 @@
   more_results_ = 0
   has_keys_only_ = 0
   keys_only_ = 0
+  has_compiled_query_ = 0
+  compiled_query_ = None
 
   def __init__(self, contents=None):
     self.result_ = []
@@ -2559,6 +3588,24 @@
 
   def has_keys_only(self): return self.has_keys_only_
 
+  def compiled_query(self):
+    if self.compiled_query_ is None:
+      self.lazy_init_lock_.acquire()
+      try:
+        if self.compiled_query_ is None: self.compiled_query_ = CompiledQuery()
+      finally:
+        self.lazy_init_lock_.release()
+    return self.compiled_query_
+
+  def mutable_compiled_query(self): self.has_compiled_query_ = 1; return self.compiled_query()
+
+  def clear_compiled_query(self):
+    if self.has_compiled_query_:
+      self.has_compiled_query_ = 0;
+      if self.compiled_query_ is not None: self.compiled_query_.Clear()
+
+  def has_compiled_query(self): return self.has_compiled_query_
+
 
   def MergeFrom(self, x):
     assert x is not self
@@ -2566,6 +3613,7 @@
     for i in xrange(x.result_size()): self.add_result().CopyFrom(x.result(i))
     if (x.has_more_results()): self.set_more_results(x.more_results())
     if (x.has_keys_only()): self.set_keys_only(x.keys_only())
+    if (x.has_compiled_query()): self.mutable_compiled_query().MergeFrom(x.compiled_query())
 
   def Equals(self, x):
     if x is self: return 1
@@ -2578,6 +3626,8 @@
     if self.has_more_results_ and self.more_results_ != x.more_results_: return 0
     if self.has_keys_only_ != x.has_keys_only_: return 0
     if self.has_keys_only_ and self.keys_only_ != x.keys_only_: return 0
+    if self.has_compiled_query_ != x.has_compiled_query_: return 0
+    if self.has_compiled_query_ and self.compiled_query_ != x.compiled_query_: return 0
     return 1
 
   def IsInitialized(self, debug_strs=None):
@@ -2589,6 +3639,7 @@
       initialized = 0
       if debug_strs is not None:
         debug_strs.append('Required field: more_results not set.')
+    if (self.has_compiled_query_ and not self.compiled_query_.IsInitialized(debug_strs)): initialized = 0
     return initialized
 
   def ByteSize(self):
@@ -2597,6 +3648,7 @@
     n += 1 * len(self.result_)
     for i in xrange(len(self.result_)): n += self.lengthString(self.result_[i].ByteSize())
     if (self.has_keys_only_): n += 2
+    if (self.has_compiled_query_): n += 1 + self.lengthString(self.compiled_query_.ByteSize())
     return n + 2
 
   def Clear(self):
@@ -2604,6 +3656,7 @@
     self.clear_result()
     self.clear_more_results()
     self.clear_keys_only()
+    self.clear_compiled_query()
 
   def OutputUnchecked(self, out):
     if (self.has_cursor_):
@@ -2619,6 +3672,10 @@
     if (self.has_keys_only_):
       out.putVarInt32(32)
       out.putBoolean(self.keys_only_)
+    if (self.has_compiled_query_):
+      out.putVarInt32(42)
+      out.putVarInt32(self.compiled_query_.ByteSize())
+      self.compiled_query_.OutputUnchecked(out)
 
   def TryMerge(self, d):
     while d.avail() > 0:
@@ -2641,6 +3698,12 @@
       if tt == 32:
         self.set_keys_only(d.getBoolean())
         continue
+      if tt == 42:
+        length = d.getVarInt32()
+        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
+        d.skip(length)
+        self.mutable_compiled_query().TryMerge(tmp)
+        continue
       if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
       d.skipData(tt)
 
@@ -2661,36 +3724,219 @@
       cnt+=1
     if self.has_more_results_: res+=prefix+("more_results: %s\n" % self.DebugFormatBool(self.more_results_))
     if self.has_keys_only_: res+=prefix+("keys_only: %s\n" % self.DebugFormatBool(self.keys_only_))
+    if self.has_compiled_query_:
+      res+=prefix+"compiled_query <\n"
+      res+=self.compiled_query_.__str__(prefix + "  ", printElemNumber)
+      res+=prefix+">\n"
     return res
 
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
   kcursor = 1
   kresult = 2
   kmore_results = 3
   kkeys_only = 4
-
-  _TEXT = (
-   "ErrorCode",
-   "cursor",
-   "result",
-   "more_results",
-   "keys_only",
-  )
-
-  _TYPES = (
-   ProtocolBuffer.Encoder.NUMERIC,
-   ProtocolBuffer.Encoder.STRING,
-
-   ProtocolBuffer.Encoder.STRING,
-
-   ProtocolBuffer.Encoder.NUMERIC,
-
-   ProtocolBuffer.Encoder.NUMERIC,
-
-  )
+  kcompiled_query = 5
+
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    1: "cursor",
+    2: "result",
+    3: "more_results",
+    4: "keys_only",
+    5: "compiled_query",
+  }, 5)
+
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    1: ProtocolBuffer.Encoder.STRING,
+    2: ProtocolBuffer.Encoder.STRING,
+    3: ProtocolBuffer.Encoder.NUMERIC,
+    4: ProtocolBuffer.Encoder.NUMERIC,
+    5: ProtocolBuffer.Encoder.STRING,
+  }, 5, ProtocolBuffer.Encoder.MAX_TYPE)
+
+  _STYLE = """"""
+  _STYLE_CONTENT_TYPE = """"""
+class GetSchemaRequest(ProtocolBuffer.ProtocolMessage):
+  has_app_ = 0
+  app_ = ""
+  has_start_kind_ = 0
+  start_kind_ = ""
+  has_end_kind_ = 0
+  end_kind_ = ""
+  has_properties_ = 0
+  properties_ = 1
+
+  def __init__(self, contents=None):
+    if contents is not None: self.MergeFromString(contents)
+
+  def app(self): return self.app_
+
+  def set_app(self, x):
+    self.has_app_ = 1
+    self.app_ = x
+
+  def clear_app(self):
+    if self.has_app_:
+      self.has_app_ = 0
+      self.app_ = ""
+
+  def has_app(self): return self.has_app_
+
+  def start_kind(self): return self.start_kind_
+
+  def set_start_kind(self, x):
+    self.has_start_kind_ = 1
+    self.start_kind_ = x
+
+  def clear_start_kind(self):
+    if self.has_start_kind_:
+      self.has_start_kind_ = 0
+      self.start_kind_ = ""
+
+  def has_start_kind(self): return self.has_start_kind_
+
+  def end_kind(self): return self.end_kind_
+
+  def set_end_kind(self, x):
+    self.has_end_kind_ = 1
+    self.end_kind_ = x
+
+  def clear_end_kind(self):
+    if self.has_end_kind_:
+      self.has_end_kind_ = 0
+      self.end_kind_ = ""
+
+  def has_end_kind(self): return self.has_end_kind_
+
+  def properties(self): return self.properties_
+
+  def set_properties(self, x):
+    self.has_properties_ = 1
+    self.properties_ = x
+
+  def clear_properties(self):
+    if self.has_properties_:
+      self.has_properties_ = 0
+      self.properties_ = 1
+
+  def has_properties(self): return self.has_properties_
+
+
+  def MergeFrom(self, x):
+    assert x is not self
+    if (x.has_app()): self.set_app(x.app())
+    if (x.has_start_kind()): self.set_start_kind(x.start_kind())
+    if (x.has_end_kind()): self.set_end_kind(x.end_kind())
+    if (x.has_properties()): self.set_properties(x.properties())
+
+  def Equals(self, x):
+    if x is self: return 1
+    if self.has_app_ != x.has_app_: return 0
+    if self.has_app_ and self.app_ != x.app_: return 0
+    if self.has_start_kind_ != x.has_start_kind_: return 0
+    if self.has_start_kind_ and self.start_kind_ != x.start_kind_: return 0
+    if self.has_end_kind_ != x.has_end_kind_: return 0
+    if self.has_end_kind_ and self.end_kind_ != x.end_kind_: return 0
+    if self.has_properties_ != x.has_properties_: return 0
+    if self.has_properties_ and self.properties_ != x.properties_: return 0
+    return 1
+
+  def IsInitialized(self, debug_strs=None):
+    initialized = 1
+    if (not self.has_app_):
+      initialized = 0
+      if debug_strs is not None:
+        debug_strs.append('Required field: app not set.')
+    return initialized
+
+  def ByteSize(self):
+    n = 0
+    n += self.lengthString(len(self.app_))
+    if (self.has_start_kind_): n += 1 + self.lengthString(len(self.start_kind_))
+    if (self.has_end_kind_): n += 1 + self.lengthString(len(self.end_kind_))
+    if (self.has_properties_): n += 2
+    return n + 1
+
+  def Clear(self):
+    self.clear_app()
+    self.clear_start_kind()
+    self.clear_end_kind()
+    self.clear_properties()
+
+  def OutputUnchecked(self, out):
+    out.putVarInt32(10)
+    out.putPrefixedString(self.app_)
+    if (self.has_start_kind_):
+      out.putVarInt32(18)
+      out.putPrefixedString(self.start_kind_)
+    if (self.has_end_kind_):
+      out.putVarInt32(26)
+      out.putPrefixedString(self.end_kind_)
+    if (self.has_properties_):
+      out.putVarInt32(32)
+      out.putBoolean(self.properties_)
+
+  def TryMerge(self, d):
+    while d.avail() > 0:
+      tt = d.getVarInt32()
+      if tt == 10:
+        self.set_app(d.getPrefixedString())
+        continue
+      if tt == 18:
+        self.set_start_kind(d.getPrefixedString())
+        continue
+      if tt == 26:
+        self.set_end_kind(d.getPrefixedString())
+        continue
+      if tt == 32:
+        self.set_properties(d.getBoolean())
+        continue
+      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+      d.skipData(tt)
+
+
+  def __str__(self, prefix="", printElemNumber=0):
+    res=""
+    if self.has_app_: res+=prefix+("app: %s\n" % self.DebugFormatString(self.app_))
+    if self.has_start_kind_: res+=prefix+("start_kind: %s\n" % self.DebugFormatString(self.start_kind_))
+    if self.has_end_kind_: res+=prefix+("end_kind: %s\n" % self.DebugFormatString(self.end_kind_))
+    if self.has_properties_: res+=prefix+("properties: %s\n" % self.DebugFormatBool(self.properties_))
+    return res
+
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+  kapp = 1
+  kstart_kind = 2
+  kend_kind = 3
+  kproperties = 4
+
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    1: "app",
+    2: "start_kind",
+    3: "end_kind",
+    4: "properties",
+  }, 4)
+
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    1: ProtocolBuffer.Encoder.STRING,
+    2: ProtocolBuffer.Encoder.STRING,
+    3: ProtocolBuffer.Encoder.STRING,
+    4: ProtocolBuffer.Encoder.NUMERIC,
+  }, 4, ProtocolBuffer.Encoder.MAX_TYPE)
 
   _STYLE = """"""
   _STYLE_CONTENT_TYPE = """"""
 class Schema(ProtocolBuffer.ProtocolMessage):
+  has_more_results_ = 0
+  more_results_ = 0
 
   def __init__(self, contents=None):
     self.kind_ = []
@@ -2712,16 +3958,32 @@
 
   def clear_kind(self):
     self.kind_ = []
+  def more_results(self): return self.more_results_
+
+  def set_more_results(self, x):
+    self.has_more_results_ = 1
+    self.more_results_ = x
+
+  def clear_more_results(self):
+    if self.has_more_results_:
+      self.has_more_results_ = 0
+      self.more_results_ = 0
+
+  def has_more_results(self): return self.has_more_results_
+
 
   def MergeFrom(self, x):
     assert x is not self
     for i in xrange(x.kind_size()): self.add_kind().CopyFrom(x.kind(i))
+    if (x.has_more_results()): self.set_more_results(x.more_results())
 
   def Equals(self, x):
     if x is self: return 1
     if len(self.kind_) != len(x.kind_): return 0
     for e1, e2 in zip(self.kind_, x.kind_):
       if e1 != e2: return 0
+    if self.has_more_results_ != x.has_more_results_: return 0
+    if self.has_more_results_ and self.more_results_ != x.more_results_: return 0
     return 1
 
   def IsInitialized(self, debug_strs=None):
@@ -2734,16 +3996,21 @@
     n = 0
     n += 1 * len(self.kind_)
     for i in xrange(len(self.kind_)): n += self.lengthString(self.kind_[i].ByteSize())
+    if (self.has_more_results_): n += 2
     return n + 0
 
   def Clear(self):
     self.clear_kind()
+    self.clear_more_results()
 
   def OutputUnchecked(self, out):
     for i in xrange(len(self.kind_)):
       out.putVarInt32(10)
       out.putVarInt32(self.kind_[i].ByteSize())
       self.kind_[i].OutputUnchecked(out)
+    if (self.has_more_results_):
+      out.putVarInt32(16)
+      out.putBoolean(self.more_results_)
 
   def TryMerge(self, d):
     while d.avail() > 0:
@@ -2754,6 +4021,9 @@
         d.skip(length)
         self.add_kind().TryMerge(tmp)
         continue
+      if tt == 16:
+        self.set_more_results(d.getBoolean())
+        continue
       if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
       d.skipData(tt)
 
@@ -2768,20 +4038,264 @@
       res+=e.__str__(prefix + "  ", printElemNumber)
       res+=prefix+">\n"
       cnt+=1
+    if self.has_more_results_: res+=prefix+("more_results: %s\n" % self.DebugFormatBool(self.more_results_))
     return res
 
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
   kkind = 1
-
-  _TEXT = (
-   "ErrorCode",
-   "kind",
-  )
-
-  _TYPES = (
-   ProtocolBuffer.Encoder.NUMERIC,
-   ProtocolBuffer.Encoder.STRING,
-
-  )
+  kmore_results = 2
+
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    1: "kind",
+    2: "more_results",
+  }, 2)
+
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    1: ProtocolBuffer.Encoder.STRING,
+    2: ProtocolBuffer.Encoder.NUMERIC,
+  }, 2, ProtocolBuffer.Encoder.MAX_TYPE)
+
+  _STYLE = """"""
+  _STYLE_CONTENT_TYPE = """"""
+class AllocateIdsRequest(ProtocolBuffer.ProtocolMessage):
+  has_model_key_ = 0
+  has_size_ = 0
+  size_ = 0
+
+  def __init__(self, contents=None):
+    self.model_key_ = Reference()
+    if contents is not None: self.MergeFromString(contents)
+
+  def model_key(self): return self.model_key_
+
+  def mutable_model_key(self): self.has_model_key_ = 1; return self.model_key_
+
+  def clear_model_key(self):self.has_model_key_ = 0; self.model_key_.Clear()
+
+  def has_model_key(self): return self.has_model_key_
+
+  def size(self): return self.size_
+
+  def set_size(self, x):
+    self.has_size_ = 1
+    self.size_ = x
+
+  def clear_size(self):
+    if self.has_size_:
+      self.has_size_ = 0
+      self.size_ = 0
+
+  def has_size(self): return self.has_size_
+
+
+  def MergeFrom(self, x):
+    assert x is not self
+    if (x.has_model_key()): self.mutable_model_key().MergeFrom(x.model_key())
+    if (x.has_size()): self.set_size(x.size())
+
+  def Equals(self, x):
+    if x is self: return 1
+    if self.has_model_key_ != x.has_model_key_: return 0
+    if self.has_model_key_ and self.model_key_ != x.model_key_: return 0
+    if self.has_size_ != x.has_size_: return 0
+    if self.has_size_ and self.size_ != x.size_: return 0
+    return 1
+
+  def IsInitialized(self, debug_strs=None):
+    initialized = 1
+    if (not self.has_model_key_):
+      initialized = 0
+      if debug_strs is not None:
+        debug_strs.append('Required field: model_key not set.')
+    elif not self.model_key_.IsInitialized(debug_strs): initialized = 0
+    if (not self.has_size_):
+      initialized = 0
+      if debug_strs is not None:
+        debug_strs.append('Required field: size not set.')
+    return initialized
+
+  def ByteSize(self):
+    n = 0
+    n += self.lengthString(self.model_key_.ByteSize())
+    n += self.lengthVarInt64(self.size_)
+    return n + 2
+
+  def Clear(self):
+    self.clear_model_key()
+    self.clear_size()
+
+  def OutputUnchecked(self, out):
+    out.putVarInt32(10)
+    out.putVarInt32(self.model_key_.ByteSize())
+    self.model_key_.OutputUnchecked(out)
+    out.putVarInt32(16)
+    out.putVarInt64(self.size_)
+
+  def TryMerge(self, d):
+    while d.avail() > 0:
+      tt = d.getVarInt32()
+      if tt == 10:
+        length = d.getVarInt32()
+        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
+        d.skip(length)
+        self.mutable_model_key().TryMerge(tmp)
+        continue
+      if tt == 16:
+        self.set_size(d.getVarInt64())
+        continue
+      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+      d.skipData(tt)
+
+
+  def __str__(self, prefix="", printElemNumber=0):
+    res=""
+    if self.has_model_key_:
+      res+=prefix+"model_key <\n"
+      res+=self.model_key_.__str__(prefix + "  ", printElemNumber)
+      res+=prefix+">\n"
+    if self.has_size_: res+=prefix+("size: %s\n" % self.DebugFormatInt64(self.size_))
+    return res
+
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+  kmodel_key = 1
+  ksize = 2
+
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    1: "model_key",
+    2: "size",
+  }, 2)
+
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    1: ProtocolBuffer.Encoder.STRING,
+    2: ProtocolBuffer.Encoder.NUMERIC,
+  }, 2, ProtocolBuffer.Encoder.MAX_TYPE)
+
+  _STYLE = """"""
+  _STYLE_CONTENT_TYPE = """"""
+class AllocateIdsResponse(ProtocolBuffer.ProtocolMessage):
+  has_start_ = 0
+  start_ = 0
+  has_end_ = 0
+  end_ = 0
+
+  def __init__(self, contents=None):
+    if contents is not None: self.MergeFromString(contents)
+
+  def start(self): return self.start_
+
+  def set_start(self, x):
+    self.has_start_ = 1
+    self.start_ = x
+
+  def clear_start(self):
+    if self.has_start_:
+      self.has_start_ = 0
+      self.start_ = 0
+
+  def has_start(self): return self.has_start_
+
+  def end(self): return self.end_
+
+  def set_end(self, x):
+    self.has_end_ = 1
+    self.end_ = x
+
+  def clear_end(self):
+    if self.has_end_:
+      self.has_end_ = 0
+      self.end_ = 0
+
+  def has_end(self): return self.has_end_
+
+
+  def MergeFrom(self, x):
+    assert x is not self
+    if (x.has_start()): self.set_start(x.start())
+    if (x.has_end()): self.set_end(x.end())
+
+  def Equals(self, x):
+    if x is self: return 1
+    if self.has_start_ != x.has_start_: return 0
+    if self.has_start_ and self.start_ != x.start_: return 0
+    if self.has_end_ != x.has_end_: return 0
+    if self.has_end_ and self.end_ != x.end_: return 0
+    return 1
+
+  def IsInitialized(self, debug_strs=None):
+    initialized = 1
+    if (not self.has_start_):
+      initialized = 0
+      if debug_strs is not None:
+        debug_strs.append('Required field: start not set.')
+    if (not self.has_end_):
+      initialized = 0
+      if debug_strs is not None:
+        debug_strs.append('Required field: end not set.')
+    return initialized
+
+  def ByteSize(self):
+    n = 0
+    n += self.lengthVarInt64(self.start_)
+    n += self.lengthVarInt64(self.end_)
+    return n + 2
+
+  def Clear(self):
+    self.clear_start()
+    self.clear_end()
+
+  def OutputUnchecked(self, out):
+    out.putVarInt32(8)
+    out.putVarInt64(self.start_)
+    out.putVarInt32(16)
+    out.putVarInt64(self.end_)
+
+  def TryMerge(self, d):
+    while d.avail() > 0:
+      tt = d.getVarInt32()
+      if tt == 8:
+        self.set_start(d.getVarInt64())
+        continue
+      if tt == 16:
+        self.set_end(d.getVarInt64())
+        continue
+      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+      d.skipData(tt)
+
+
+  def __str__(self, prefix="", printElemNumber=0):
+    res=""
+    if self.has_start_: res+=prefix+("start: %s\n" % self.DebugFormatInt64(self.start_))
+    if self.has_end_: res+=prefix+("end: %s\n" % self.DebugFormatInt64(self.end_))
+    return res
+
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+  kstart = 1
+  kend = 2
+
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    1: "start",
+    2: "end",
+  }, 2)
+
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    1: ProtocolBuffer.Encoder.NUMERIC,
+    2: ProtocolBuffer.Encoder.NUMERIC,
+  }, 2, ProtocolBuffer.Encoder.MAX_TYPE)
 
   _STYLE = """"""
   _STYLE_CONTENT_TYPE = """"""
@@ -2865,18 +4379,198 @@
       cnt+=1
     return res
 
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
   kindex = 1
 
-  _TEXT = (
-   "ErrorCode",
-   "index",
-  )
-
-  _TYPES = (
-   ProtocolBuffer.Encoder.NUMERIC,
-   ProtocolBuffer.Encoder.STRING,
-
-  )
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    1: "index",
+  }, 1)
+
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    1: ProtocolBuffer.Encoder.STRING,
+  }, 1, ProtocolBuffer.Encoder.MAX_TYPE)
+
+  _STYLE = """"""
+  _STYLE_CONTENT_TYPE = """"""
+class ActionRequest(ProtocolBuffer.ProtocolMessage):
+  has_transaction_ = 0
+  has_action_ = 0
+
+  def __init__(self, contents=None):
+    self.transaction_ = Transaction()
+    self.action_ = Action()
+    if contents is not None: self.MergeFromString(contents)
+
+  def transaction(self): return self.transaction_
+
+  def mutable_transaction(self): self.has_transaction_ = 1; return self.transaction_
+
+  def clear_transaction(self):self.has_transaction_ = 0; self.transaction_.Clear()
+
+  def has_transaction(self): return self.has_transaction_
+
+  def action(self): return self.action_
+
+  def mutable_action(self): self.has_action_ = 1; return self.action_
+
+  def clear_action(self):self.has_action_ = 0; self.action_.Clear()
+
+  def has_action(self): return self.has_action_
+
+
+  def MergeFrom(self, x):
+    assert x is not self
+    if (x.has_transaction()): self.mutable_transaction().MergeFrom(x.transaction())
+    if (x.has_action()): self.mutable_action().MergeFrom(x.action())
+
+  def Equals(self, x):
+    if x is self: return 1
+    if self.has_transaction_ != x.has_transaction_: return 0
+    if self.has_transaction_ and self.transaction_ != x.transaction_: return 0
+    if self.has_action_ != x.has_action_: return 0
+    if self.has_action_ and self.action_ != x.action_: return 0
+    return 1
+
+  def IsInitialized(self, debug_strs=None):
+    initialized = 1
+    if (not self.has_transaction_):
+      initialized = 0
+      if debug_strs is not None:
+        debug_strs.append('Required field: transaction not set.')
+    elif not self.transaction_.IsInitialized(debug_strs): initialized = 0
+    if (not self.has_action_):
+      initialized = 0
+      if debug_strs is not None:
+        debug_strs.append('Required field: action not set.')
+    elif not self.action_.IsInitialized(debug_strs): initialized = 0
+    return initialized
+
+  def ByteSize(self):
+    n = 0
+    n += self.lengthString(self.transaction_.ByteSize())
+    n += self.lengthString(self.action_.ByteSize())
+    return n + 2
+
+  def Clear(self):
+    self.clear_transaction()
+    self.clear_action()
+
+  def OutputUnchecked(self, out):
+    out.putVarInt32(10)
+    out.putVarInt32(self.transaction_.ByteSize())
+    self.transaction_.OutputUnchecked(out)
+    out.putVarInt32(18)
+    out.putVarInt32(self.action_.ByteSize())
+    self.action_.OutputUnchecked(out)
+
+  def TryMerge(self, d):
+    while d.avail() > 0:
+      tt = d.getVarInt32()
+      if tt == 10:
+        length = d.getVarInt32()
+        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
+        d.skip(length)
+        self.mutable_transaction().TryMerge(tmp)
+        continue
+      if tt == 18:
+        length = d.getVarInt32()
+        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
+        d.skip(length)
+        self.mutable_action().TryMerge(tmp)
+        continue
+      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+      d.skipData(tt)
+
+
+  def __str__(self, prefix="", printElemNumber=0):
+    res=""
+    if self.has_transaction_:
+      res+=prefix+"transaction <\n"
+      res+=self.transaction_.__str__(prefix + "  ", printElemNumber)
+      res+=prefix+">\n"
+    if self.has_action_:
+      res+=prefix+"action <\n"
+      res+=self.action_.__str__(prefix + "  ", printElemNumber)
+      res+=prefix+">\n"
+    return res
+
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+  ktransaction = 1
+  kaction = 2
+
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    1: "transaction",
+    2: "action",
+  }, 2)
+
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    1: ProtocolBuffer.Encoder.STRING,
+    2: ProtocolBuffer.Encoder.STRING,
+  }, 2, ProtocolBuffer.Encoder.MAX_TYPE)
+
+  _STYLE = """"""
+  _STYLE_CONTENT_TYPE = """"""
+class ActionResponse(ProtocolBuffer.ProtocolMessage):
+
+  def __init__(self, contents=None):
+    pass
+    if contents is not None: self.MergeFromString(contents)
+
+
+  def MergeFrom(self, x):
+    assert x is not self
+
+  def Equals(self, x):
+    if x is self: return 1
+    return 1
+
+  def IsInitialized(self, debug_strs=None):
+    initialized = 1
+    return initialized
+
+  def ByteSize(self):
+    n = 0
+    return n + 0
+
+  def Clear(self):
+    pass
+
+  def OutputUnchecked(self, out):
+    pass
+
+  def TryMerge(self, d):
+    while d.avail() > 0:
+      tt = d.getVarInt32()
+      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+      d.skipData(tt)
+
+
+  def __str__(self, prefix="", printElemNumber=0):
+    res=""
+    return res
+
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+  }, 0)
+
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+  }, 0, ProtocolBuffer.Encoder.MAX_TYPE)
 
   _STYLE = """"""
   _STYLE_CONTENT_TYPE = """"""
@@ -2957,20 +4651,23 @@
       res+=prefix+">\n"
     return res
 
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
   kcost = 1
 
-  _TEXT = (
-   "ErrorCode",
-   "cost",
-  )
-
-  _TYPES = (
-   ProtocolBuffer.Encoder.NUMERIC,
-   ProtocolBuffer.Encoder.STRING,
-
-  )
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    1: "cost",
+  }, 1)
+
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    1: ProtocolBuffer.Encoder.STRING,
+  }, 1, ProtocolBuffer.Encoder.MAX_TYPE)
 
   _STYLE = """"""
   _STYLE_CONTENT_TYPE = """"""
 
-__all__ = ['Transaction','Query','Query_Filter','Query_Order','QueryExplanation','Cursor','Error','Cost','GetRequest','GetResponse','GetResponse_Entity','PutRequest','PutResponse','DeleteRequest','DeleteResponse','NextRequest','QueryResult','Schema','CompositeIndices','CommitResponse']
+__all__ = ['Transaction','Query','Query_Filter','Query_Order','CompiledQuery','CompiledQuery_PrimaryScan','CompiledQuery_MergeJoinScan','CompiledQuery_EntityFilter','RunCompiledQueryRequest','QueryExplanation','Cursor','Error','Cost','GetRequest','GetResponse','GetResponse_Entity','PutRequest','PutResponse','DeleteRequest','DeleteResponse','NextRequest','QueryResult','GetSchemaRequest','Schema','AllocateIdsRequest','AllocateIdsResponse','CompositeIndices','ActionRequest','ActionResponse','CommitResponse']
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/thirdparty/google_appengine/google/appengine/datastore/datastore_v3_pb.py	Mon Sep 07 20:27:37 2009 +0200
@@ -0,0 +1,26 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""The Python datastore protocol buffer definition.
+
+Proto2 compiler expects generated file names to follow specific pattern,
+which is not the case for the datastore_pb.py (should be datastore_v3_pb.py).
+This file with the expected name redirects to the real legacy file.
+"""
+
+
+from google.appengine.datastore.datastore_pb import *
--- a/thirdparty/google_appengine/google/appengine/datastore/entity_pb.py	Mon Sep 07 20:26:39 2009 +0200
+++ b/thirdparty/google_appengine/google/appengine/datastore/entity_pb.py	Mon Sep 07 20:27:37 2009 +0200
@@ -783,6 +783,10 @@
       res+=prefix+"}\n"
     return res
 
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
   kint64Value = 1
   kbooleanValue = 2
   kstringValue = 3
@@ -803,70 +807,51 @@
   kReferenceValuePathElementid = 16
   kReferenceValuePathElementname = 17
 
-  _TEXT = (
-   "ErrorCode",
-   "int64Value",
-   "booleanValue",
-   "stringValue",
-   "doubleValue",
-   "PointValue",
-   "x",
-   "y",
-   "UserValue",
-   "email",
-   "auth_domain",
-   "nickname",
-   "ReferenceValue",
-   "app",
-   "PathElement",
-   "type",
-   "id",
-   "name",
-   "gaiaid",
-   "obfuscated_gaiaid",
-  )
-
-  _TYPES = (
-   ProtocolBuffer.Encoder.NUMERIC,
-   ProtocolBuffer.Encoder.NUMERIC,
-
-   ProtocolBuffer.Encoder.NUMERIC,
-
-   ProtocolBuffer.Encoder.STRING,
-
-   ProtocolBuffer.Encoder.DOUBLE,
-
-   ProtocolBuffer.Encoder.STARTGROUP,
-
-   ProtocolBuffer.Encoder.DOUBLE,
-
-   ProtocolBuffer.Encoder.DOUBLE,
-
-   ProtocolBuffer.Encoder.STARTGROUP,
-
-   ProtocolBuffer.Encoder.STRING,
-
-   ProtocolBuffer.Encoder.STRING,
-
-   ProtocolBuffer.Encoder.STRING,
-
-   ProtocolBuffer.Encoder.STARTGROUP,
-
-   ProtocolBuffer.Encoder.STRING,
-
-   ProtocolBuffer.Encoder.STARTGROUP,
-
-   ProtocolBuffer.Encoder.STRING,
-
-   ProtocolBuffer.Encoder.NUMERIC,
-
-   ProtocolBuffer.Encoder.STRING,
-
-   ProtocolBuffer.Encoder.NUMERIC,
-
-   ProtocolBuffer.Encoder.STRING,
-
-  )
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    1: "int64Value",
+    2: "booleanValue",
+    3: "stringValue",
+    4: "doubleValue",
+    5: "PointValue",
+    6: "x",
+    7: "y",
+    8: "UserValue",
+    9: "email",
+    10: "auth_domain",
+    11: "nickname",
+    12: "ReferenceValue",
+    13: "app",
+    14: "PathElement",
+    15: "type",
+    16: "id",
+    17: "name",
+    18: "gaiaid",
+    19: "obfuscated_gaiaid",
+  }, 19)
+
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    1: ProtocolBuffer.Encoder.NUMERIC,
+    2: ProtocolBuffer.Encoder.NUMERIC,
+    3: ProtocolBuffer.Encoder.STRING,
+    4: ProtocolBuffer.Encoder.DOUBLE,
+    5: ProtocolBuffer.Encoder.STARTGROUP,
+    6: ProtocolBuffer.Encoder.DOUBLE,
+    7: ProtocolBuffer.Encoder.DOUBLE,
+    8: ProtocolBuffer.Encoder.STARTGROUP,
+    9: ProtocolBuffer.Encoder.STRING,
+    10: ProtocolBuffer.Encoder.STRING,
+    11: ProtocolBuffer.Encoder.STRING,
+    12: ProtocolBuffer.Encoder.STARTGROUP,
+    13: ProtocolBuffer.Encoder.STRING,
+    14: ProtocolBuffer.Encoder.STARTGROUP,
+    15: ProtocolBuffer.Encoder.STRING,
+    16: ProtocolBuffer.Encoder.NUMERIC,
+    17: ProtocolBuffer.Encoder.STRING,
+    18: ProtocolBuffer.Encoder.NUMERIC,
+    19: ProtocolBuffer.Encoder.STRING,
+  }, 19, ProtocolBuffer.Encoder.MAX_TYPE)
 
   _STYLE = """"""
   _STYLE_CONTENT_TYPE = """"""
@@ -888,6 +873,7 @@
   GD_PHONENUMBER =   11
   GD_POSTALADDRESS =   12
   GD_RATING    =   13
+  BLOBKEY      =   17
 
   _Meaning_NAMES = {
     14: "BLOB",
@@ -906,6 +892,7 @@
     11: "GD_PHONENUMBER",
     12: "GD_POSTALADDRESS",
     13: "GD_RATING",
+    17: "BLOBKEY",
   }
 
   def Meaning_Name(cls, x): return cls._Meaning_NAMES.get(x, "")
@@ -1092,34 +1079,33 @@
     if self.has_multiple_: res+=prefix+("multiple: %s\n" % self.DebugFormatBool(self.multiple_))
     return res
 
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
   kmeaning = 1
   kmeaning_uri = 2
   kname = 3
   kvalue = 5
   kmultiple = 4
 
-  _TEXT = (
-   "ErrorCode",
-   "meaning",
-   "meaning_uri",
-   "name",
-   "multiple",
-   "value",
-  )
-
-  _TYPES = (
-   ProtocolBuffer.Encoder.NUMERIC,
-   ProtocolBuffer.Encoder.NUMERIC,
-
-   ProtocolBuffer.Encoder.STRING,
-
-   ProtocolBuffer.Encoder.STRING,
-
-   ProtocolBuffer.Encoder.NUMERIC,
-
-   ProtocolBuffer.Encoder.STRING,
-
-  )
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    1: "meaning",
+    2: "meaning_uri",
+    3: "name",
+    4: "multiple",
+    5: "value",
+  }, 5)
+
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    1: ProtocolBuffer.Encoder.NUMERIC,
+    2: ProtocolBuffer.Encoder.STRING,
+    3: ProtocolBuffer.Encoder.STRING,
+    4: ProtocolBuffer.Encoder.NUMERIC,
+    5: ProtocolBuffer.Encoder.STRING,
+  }, 5, ProtocolBuffer.Encoder.MAX_TYPE)
 
   _STYLE = """"""
   _STYLE_CONTENT_TYPE = """"""
@@ -1321,30 +1307,30 @@
       cnt+=1
     return res
 
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
   kElementGroup = 1
   kElementtype = 2
   kElementid = 3
   kElementname = 4
 
-  _TEXT = (
-   "ErrorCode",
-   "Element",
-   "type",
-   "id",
-   "name",
-  )
-
-  _TYPES = (
-   ProtocolBuffer.Encoder.NUMERIC,
-   ProtocolBuffer.Encoder.STARTGROUP,
-
-   ProtocolBuffer.Encoder.STRING,
-
-   ProtocolBuffer.Encoder.NUMERIC,
-
-   ProtocolBuffer.Encoder.STRING,
-
-  )
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    1: "Element",
+    2: "type",
+    3: "id",
+    4: "name",
+  }, 4)
+
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    1: ProtocolBuffer.Encoder.STARTGROUP,
+    2: ProtocolBuffer.Encoder.STRING,
+    3: ProtocolBuffer.Encoder.NUMERIC,
+    4: ProtocolBuffer.Encoder.STRING,
+  }, 4, ProtocolBuffer.Encoder.MAX_TYPE)
 
   _STYLE = """"""
   _STYLE_CONTENT_TYPE = """"""
@@ -1447,58 +1433,24 @@
       res+=prefix+">\n"
     return res
 
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
   kapp = 13
   kpath = 14
 
-  _TEXT = (
-   "ErrorCode",
-   None,
-   None,
-   None,
-   None,
-   None,
-   None,
-   None,
-   None,
-   None,
-   None,
-   None,
-   None,
-   "app",
-   "path",
-  )
-
-  _TYPES = (
-   ProtocolBuffer.Encoder.NUMERIC,
-   ProtocolBuffer.Encoder.MAX_TYPE,
-
-   ProtocolBuffer.Encoder.MAX_TYPE,
-
-   ProtocolBuffer.Encoder.MAX_TYPE,
-
-   ProtocolBuffer.Encoder.MAX_TYPE,
-
-   ProtocolBuffer.Encoder.MAX_TYPE,
-
-   ProtocolBuffer.Encoder.MAX_TYPE,
-
-   ProtocolBuffer.Encoder.MAX_TYPE,
-
-   ProtocolBuffer.Encoder.MAX_TYPE,
-
-   ProtocolBuffer.Encoder.MAX_TYPE,
-
-   ProtocolBuffer.Encoder.MAX_TYPE,
-
-   ProtocolBuffer.Encoder.MAX_TYPE,
-
-   ProtocolBuffer.Encoder.MAX_TYPE,
-
-   ProtocolBuffer.Encoder.STRING,
-
-   ProtocolBuffer.Encoder.STRING,
-
-  )
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    13: "app",
+    14: "path",
+  }, 14)
+
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    13: ProtocolBuffer.Encoder.STRING,
+    14: ProtocolBuffer.Encoder.STRING,
+  }, 14, ProtocolBuffer.Encoder.MAX_TYPE)
 
   _STYLE = """"""
   _STYLE_CONTENT_TYPE = """"""
@@ -1682,34 +1634,33 @@
     if self.has_obfuscated_gaiaid_: res+=prefix+("obfuscated_gaiaid: %s\n" % self.DebugFormatString(self.obfuscated_gaiaid_))
     return res
 
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
   kemail = 1
   kauth_domain = 2
   knickname = 3
   kgaiaid = 4
   kobfuscated_gaiaid = 5
 
-  _TEXT = (
-   "ErrorCode",
-   "email",
-   "auth_domain",
-   "nickname",
-   "gaiaid",
-   "obfuscated_gaiaid",
-  )
-
-  _TYPES = (
-   ProtocolBuffer.Encoder.NUMERIC,
-   ProtocolBuffer.Encoder.STRING,
-
-   ProtocolBuffer.Encoder.STRING,
-
-   ProtocolBuffer.Encoder.STRING,
-
-   ProtocolBuffer.Encoder.NUMERIC,
-
-   ProtocolBuffer.Encoder.STRING,
-
-  )
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    1: "email",
+    2: "auth_domain",
+    3: "nickname",
+    4: "gaiaid",
+    5: "obfuscated_gaiaid",
+  }, 5)
+
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    1: ProtocolBuffer.Encoder.STRING,
+    2: ProtocolBuffer.Encoder.STRING,
+    3: ProtocolBuffer.Encoder.STRING,
+    4: ProtocolBuffer.Encoder.NUMERIC,
+    5: ProtocolBuffer.Encoder.STRING,
+  }, 5, ProtocolBuffer.Encoder.MAX_TYPE)
 
   _STYLE = """"""
   _STYLE_CONTENT_TYPE = """"""
@@ -2012,6 +1963,10 @@
       cnt+=1
     return res
 
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
   kkey = 13
   kentity_group = 16
   kowner = 17
@@ -2020,64 +1975,27 @@
   kproperty = 14
   kraw_property = 15
 
-  _TEXT = (
-   "ErrorCode",
-   None,
-   None,
-   None,
-   "kind",
-   "kind_uri",
-   None,
-   None,
-   None,
-   None,
-   None,
-   None,
-   None,
-   "key",
-   "property",
-   "raw_property",
-   "entity_group",
-   "owner",
-  )
-
-  _TYPES = (
-   ProtocolBuffer.Encoder.NUMERIC,
-   ProtocolBuffer.Encoder.MAX_TYPE,
-
-   ProtocolBuffer.Encoder.MAX_TYPE,
-
-   ProtocolBuffer.Encoder.MAX_TYPE,
-
-   ProtocolBuffer.Encoder.NUMERIC,
-
-   ProtocolBuffer.Encoder.STRING,
-
-   ProtocolBuffer.Encoder.MAX_TYPE,
-
-   ProtocolBuffer.Encoder.MAX_TYPE,
-
-   ProtocolBuffer.Encoder.MAX_TYPE,
-
-   ProtocolBuffer.Encoder.MAX_TYPE,
-
-   ProtocolBuffer.Encoder.MAX_TYPE,
-
-   ProtocolBuffer.Encoder.MAX_TYPE,
-
-   ProtocolBuffer.Encoder.MAX_TYPE,
-
-   ProtocolBuffer.Encoder.STRING,
-
-   ProtocolBuffer.Encoder.STRING,
-
-   ProtocolBuffer.Encoder.STRING,
-
-   ProtocolBuffer.Encoder.STRING,
-
-   ProtocolBuffer.Encoder.STRING,
-
-  )
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    4: "kind",
+    5: "kind_uri",
+    13: "key",
+    14: "property",
+    15: "raw_property",
+    16: "entity_group",
+    17: "owner",
+  }, 17)
+
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    4: ProtocolBuffer.Encoder.NUMERIC,
+    5: ProtocolBuffer.Encoder.STRING,
+    13: ProtocolBuffer.Encoder.STRING,
+    14: ProtocolBuffer.Encoder.STRING,
+    15: ProtocolBuffer.Encoder.STRING,
+    16: ProtocolBuffer.Encoder.STRING,
+    17: ProtocolBuffer.Encoder.STRING,
+  }, 17, ProtocolBuffer.Encoder.MAX_TYPE)
 
   _STYLE = """"""
   _STYLE_CONTENT_TYPE = """"""
@@ -2182,22 +2100,24 @@
       cnt+=1
     return res
 
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
   kindex_id = 1
   kvalue = 2
 
-  _TEXT = (
-   "ErrorCode",
-   "index_id",
-   "value",
-  )
-
-  _TYPES = (
-   ProtocolBuffer.Encoder.NUMERIC,
-   ProtocolBuffer.Encoder.NUMERIC,
-
-   ProtocolBuffer.Encoder.STRING,
-
-  )
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    1: "index_id",
+    2: "value",
+  }, 2)
+
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    1: ProtocolBuffer.Encoder.NUMERIC,
+    2: ProtocolBuffer.Encoder.STRING,
+  }, 2, ProtocolBuffer.Encoder.MAX_TYPE)
 
   _STYLE = """"""
   _STYLE_CONTENT_TYPE = """"""
@@ -2443,34 +2363,33 @@
       cnt+=1
     return res
 
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
   kentity_type = 1
   kancestor = 5
   kPropertyGroup = 2
   kPropertyname = 3
   kPropertydirection = 4
 
-  _TEXT = (
-   "ErrorCode",
-   "entity_type",
-   "Property",
-   "name",
-   "direction",
-   "ancestor",
-  )
-
-  _TYPES = (
-   ProtocolBuffer.Encoder.NUMERIC,
-   ProtocolBuffer.Encoder.STRING,
-
-   ProtocolBuffer.Encoder.STARTGROUP,
-
-   ProtocolBuffer.Encoder.STRING,
-
-   ProtocolBuffer.Encoder.NUMERIC,
-
-   ProtocolBuffer.Encoder.NUMERIC,
-
-  )
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    1: "entity_type",
+    2: "Property",
+    3: "name",
+    4: "direction",
+    5: "ancestor",
+  }, 5)
+
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    1: ProtocolBuffer.Encoder.STRING,
+    2: ProtocolBuffer.Encoder.STARTGROUP,
+    3: ProtocolBuffer.Encoder.STRING,
+    4: ProtocolBuffer.Encoder.NUMERIC,
+    5: ProtocolBuffer.Encoder.NUMERIC,
+  }, 5, ProtocolBuffer.Encoder.MAX_TYPE)
 
   _STYLE = """"""
   _STYLE_CONTENT_TYPE = """"""
@@ -2649,30 +2568,30 @@
     if self.has_state_: res+=prefix+("state: %s\n" % self.DebugFormatInt32(self.state_))
     return res
 
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
   kapp_id = 1
   kid = 2
   kdefinition = 3
   kstate = 4
 
-  _TEXT = (
-   "ErrorCode",
-   "app_id",
-   "id",
-   "definition",
-   "state",
-  )
-
-  _TYPES = (
-   ProtocolBuffer.Encoder.NUMERIC,
-   ProtocolBuffer.Encoder.STRING,
-
-   ProtocolBuffer.Encoder.NUMERIC,
-
-   ProtocolBuffer.Encoder.STRING,
-
-   ProtocolBuffer.Encoder.NUMERIC,
-
-  )
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    1: "app_id",
+    2: "id",
+    3: "definition",
+    4: "state",
+  }, 4)
+
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    1: ProtocolBuffer.Encoder.STRING,
+    2: ProtocolBuffer.Encoder.NUMERIC,
+    3: ProtocolBuffer.Encoder.STRING,
+    4: ProtocolBuffer.Encoder.NUMERIC,
+  }, 4, ProtocolBuffer.Encoder.MAX_TYPE)
 
   _STYLE = """"""
   _STYLE_CONTENT_TYPE = """"""
--- a/thirdparty/google_appengine/google/appengine/dist/_library.py	Mon Sep 07 20:26:39 2009 +0200
+++ b/thirdparty/google_appengine/google/appengine/dist/_library.py	Mon Sep 07 20:27:37 2009 +0200
@@ -75,7 +75,10 @@
 
 PACKAGES = {
     'django': (DjangoVersion,
-               {'1.0': None, '0.96': None}),
+               {'0.96': None,
+                '1.0': None,
+                '1.1': None,
+                }),
 
 
 
--- a/thirdparty/google_appengine/google/appengine/dist/httplib.py	Mon Sep 07 20:26:39 2009 +0200
+++ b/thirdparty/google_appengine/google/appengine/dist/httplib.py	Mon Sep 07 20:27:37 2009 +0200
@@ -171,7 +171,7 @@
     self._url = selector
 
   def putheader(self, header, *lines):
-    line = '\r\n\t'.join(lines)
+    line = '\r\n\t'.join([str(line) for line in lines])
     self.headers.append((header, line))
 
   def endheaders(self):
@@ -295,7 +295,7 @@
 
   def putheader(self, header, *values):
     "The superclass allows only one value argument."
-    self._conn.putheader(header, '\r\n\t'.join(values))
+    self._conn.putheader(header, '\r\n\t'.join([str(v) for v in values]))
 
   def getreply(self):
     """Compat definition since superclass does not define it.
--- a/thirdparty/google_appengine/google/appengine/dist/py_zipimport.py	Mon Sep 07 20:26:39 2009 +0200
+++ b/thirdparty/google_appengine/google/appengine/dist/py_zipimport.py	Mon Sep 07 20:27:37 2009 +0200
@@ -220,11 +220,14 @@
 
   def get_data(self, fullpath):
     """Return (binary) content of a data file in the zipfile."""
-    required_prefix = os.path.join(self.archive, '')
-    if not fullpath.startswith(required_prefix):
-      raise IOError('Path %r doesn\'t start with zipfile name %r' %
-                    (fullpath, required_prefix))
-    relpath = fullpath[len(required_prefix):]
+    prefix = os.path.join(self.archive, '')
+    if fullpath.startswith(prefix):
+      relpath = fullpath[len(prefix):]
+    elif os.path.isabs(fullpath):
+      raise IOError('Absolute path %r doesn\'t start with zipfile name %r' %
+                    (fullpath, prefix))
+    else:
+      relpath = fullpath
     try:
       return self.zipfile.read(relpath)
     except KeyError:
--- a/thirdparty/google_appengine/google/appengine/ext/admin/__init__.py	Mon Sep 07 20:26:39 2009 +0200
+++ b/thirdparty/google_appengine/google/appengine/ext/admin/__init__.py	Mon Sep 07 20:27:37 2009 +0200
@@ -119,6 +119,8 @@
       'interactive_execute_path': base_path + InteractiveExecuteHandler.PATH,
       'memcache_path': base_path + MemcachePageHandler.PATH,
       'queues_path': base_path + QueuesPageHandler.PATH,
+      'xmpp_path': base_path + XMPPPageHandler.PATH,
+      'inboundmail_path': base_path + InboundMailPageHandler.PATH,
     }
     if HAVE_CRON:
       values['cron_path'] = base_path + CronPageHandler.PATH
@@ -248,6 +250,34 @@
     self.generate('cron.html', values)
 
 
+class XMPPPageHandler(BaseRequestHandler):
+  """Tests XMPP requests."""
+  PATH = '/xmpp'
+
+  def get(self):
+    """Shows template displaying the XMPP."""
+    xmpp_configured = True
+    values = {
+      'xmpp_configured': xmpp_configured,
+      'request': self.request
+    }
+    self.generate('xmpp.html', values)
+
+
+class InboundMailPageHandler(BaseRequestHandler):
+  """Tests Mail requests."""
+  PATH = '/inboundmail'
+
+  def get(self):
+    """Shows template displaying the Inbound Mail form."""
+    inboundmail_configured = True
+    values = {
+      'inboundmail_configured': inboundmail_configured,
+      'request': self.request
+    }
+    self.generate('inboundmail.html', values)
+
+
 class QueuesPageHandler(BaseRequestHandler):
   """Shows information about configured (and default) task queues."""
   PATH = '/queues'
@@ -1214,7 +1244,12 @@
 
 
 def _ParseCronYaml():
-  """Load the cron.yaml file and parse it."""
+  """Loads the cron.yaml file and parses it.
+
+  The CWD of the dev_appserver is the root of the application here.
+
+  Returns a dict representing the contents of cron.yaml.
+  """
   cronyaml_files = 'cron.yaml', 'cron.yml'
   for cronyaml in cronyaml_files:
     try:
@@ -1240,6 +1275,8 @@
     ('.*' + ImageHandler.PATH, ImageHandler),
     ('.*' + QueuesPageHandler.PATH, QueuesPageHandler),
     ('.*' + TasksPageHandler.PATH, TasksPageHandler),
+    ('.*' + XMPPPageHandler.PATH, XMPPPageHandler),
+    ('.*' + InboundMailPageHandler.PATH, InboundMailPageHandler),
     ('.*', DefaultPageHandler),
   ]
   if HAVE_CRON:
--- a/thirdparty/google_appengine/google/appengine/ext/admin/templates/base.html	Mon Sep 07 20:26:39 2009 +0200
+++ b/thirdparty/google_appengine/google/appengine/ext/admin/templates/base.html	Mon Sep 07 20:27:37 2009 +0200
@@ -10,29 +10,29 @@
   </head>
   <body {% block bodyattributes %}{% endblock %}>
     <div class="g-doc">
-    
+
     <div id="hd" class="g-section">
 
       <div class="g-section">
         <img id="ae-logo" src="./images/google.gif" width="153" height="47"
          alt="Google App Engine"/>
       </div>
-      
+
       <div id="ae-appbar-lrg" class="g-section">
         <h1>{{ application_name }} Development Console</h1>
       </div>
-      
+
     </div>
-    
-    
+
+
     <div id="bd" class="g-section">
-    
+
       <div class="g-section g-tpl-160">
-    
+
         <div id="ae-lhs-nav" class="g-unit g-first">
-    
+
           <div id="ae-nav" class="g-c">
-        
+
             <ul id="menu">
               <li><a href="{{ datastore_path }}">Datastore Viewer</a></li>
               <li><a href="{{ interactive_path }}">Interactive Console</a></li>
@@ -41,18 +41,22 @@
               {% if cron_path %}
               <li><a href="{{ cron_path }}">Cron Jobs</a></li>
               {% endif %}
+              <li><a href="{{ xmpp_path }}">XMPP</a></li>
+              {% comment %}
+              <li><a href="{{ inboundmail_path }}">Inbound Mail</a></li>
+              {% endcomment %}
             </ul>
-        
+
           </div>
-        
+
         </div>
-        
+
         <div id="ae-content" class="g-unit">
           {% block body %}{% endblock %}
         </div>
-    
+
     </div>
-    
+
         <div id="ft">
           <p>
             &copy;2009 Google
@@ -83,7 +87,7 @@
     function makeSelected(e) {
       e.className = "ae-nav-selected";
     }
-    
+
     walk(document.getElementById("menu"), isCurrentLink, makeSelected);
 
     //]]>
--- a/thirdparty/google_appengine/google/appengine/ext/admin/templates/css/ae.css	Mon Sep 07 20:26:39 2009 +0200
+++ b/thirdparty/google_appengine/google/appengine/ext/admin/templates/css/ae.css	Mon Sep 07 20:27:37 2009 +0200
@@ -151,4 +151,20 @@
 }
 .ae-table .ae-pager {
   background-color: #c5d7ef;
-}
\ No newline at end of file
+}
+
+.ae-errorbox {
+  border: 1px solid #f00;
+  background-color: #fee;
+  margin-bottom: 1em;
+  padding: 1em;
+  display: inline-block;
+}
+
+.ae-message {
+  border: 1px solid #e5ecf9;
+  background-color: #f6f9ff;
+  margin-bottom: 1em;
+  padding: 1em;
+  display: inline-block;
+}
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/thirdparty/google_appengine/google/appengine/ext/admin/templates/css/inboundmail.css	Mon Sep 07 20:27:37 2009 +0200
@@ -0,0 +1,19 @@
+#inboundmail label {
+  display: block;
+  font-weight: bold;
+}
+#inboundmail legend {
+  font-weight: bold;
+}
+#inboundmail .radio label {
+  display: inline;
+  font-weight: normal;
+}
+
+#inboundmail fieldset,
+#inboundmail .fieldset {
+  margin-bottom: 8px;
+}
+#inboundmail-submit {
+  margin-top: 2em;
+}
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/thirdparty/google_appengine/google/appengine/ext/admin/templates/css/xmpp.css	Mon Sep 07 20:27:37 2009 +0200
@@ -0,0 +1,19 @@
+#xmpp label {
+  display: block;
+  font-weight: bold;
+}
+#xmpp legend {
+  font-weight: bold;
+}
+#xmpp .radio label {
+  display: inline;
+  font-weight: normal;
+}
+
+#xmpp fieldset,
+#xmpp .fieldset {
+  margin-bottom: 8px;
+}
+#xmpp-submit {
+  margin-top: 2em;
+}
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/thirdparty/google_appengine/google/appengine/ext/admin/templates/inboundmail.html	Mon Sep 07 20:27:37 2009 +0200
@@ -0,0 +1,158 @@
+{% extends "base.html" %}
+
+{% block title %}{{ application_name }} Development Console - Inbound Mail{% endblock %}
+
+{% block breadcrumbs %}
+  <span class="item"><a href="">Email</a></span>
+{% endblock %}
+
+{% block head %}
+
+  <style type="text/css">{% include "css/inboundmail.css" %}</style>
+  <script type="text/javascript">
+    {% include "js/webhook.js" %}
+    {% include "js/multipart_form_data.js" %}
+    {% include "js/rfc822_date.js" %}
+
+    var feedbackEl;
+    var formEl;
+    var payloadEl;
+    var fromEl;
+    var toEl;
+    var ccEl;
+    var subjectEl;
+    var bodyEl;
+    var contentLengthEl;
+    //var contentTypeEl;
+
+    var sendInboundMailWebhook = function() {
+
+      if (!feedbackEl) {
+        feedbackEl = document.getElementById('feedback');
+        formEl = document.getElementById('inboundmail-form');
+        fromEl = document.getElementById('from');
+        toEl = document.getElementById('to');
+        ccEl = document.getElementById('cc');
+        subjectEl = document.getElementById('subject');
+        bodyEl = document.getElementById('body');
+        payloadEl = document.getElementById('payload');
+        contentLengthEl = document.getElementById('content-length');
+      }
+
+      var from = fromEl.value;
+      var to = toEl.value;
+      var cc = ccEl.value;
+      var subject = subjectEl.value;
+      var body = bodyEl.value;
+
+      if (!to || !from || !body) {
+        feedbackEl.className = 'ae-errorbox';
+        feedbackEl.innerHTML = 'From, To and Message body are required.';
+        return;
+      }
+
+      feedbackEl.className = 'ae-message';
+      feedbackEl.innerHTML = 'Sending mail message...';
+
+      var mpfd = new MultipartFormData();
+      mpfd.addHeader('MIME-Version', '1.0');
+      mpfd.addHeader('Date', RFC822Date.format(new Date()));
+      mpfd.addHeader('From', from);
+      mpfd.addHeader('To', to);
+      if (cc) {
+        mpfd.addHeader('Cc', cc);
+      }
+      mpfd.addHeader('Subject', subject);
+      mpfd.addHeader('Content-Type', 'multipart/alternative; ' +
+          'boundary=' + mpfd.boundary);
+      mpfd.addPart(null, body, 'text/plain; charset=UTF-8');
+      mpfd.addPart(null, body, 'text/html; charset=UTF-8');
+
+      payloadEl.value = mpfd.toString();
+
+      contentLengthEl.value = payloadEl.value.length;
+
+      formEl.action = '/_ah/mail/' + escape(to);
+
+      (new Webhook('inboundmail-form')).run(handleInboundMailResult);
+
+      // Prevents actual form posts.
+      return false;
+    };
+
+    var handleInboundMailResult = function(hook, req, error) {
+      if (error != null || req == null || req.status != 200) {
+        feedbackEl.className = 'ae-errorbox';
+        feedbackEl.innerHTML = 'Message send failure<br>' +
+            req.responseText;
+      } else {
+        var timestamp;
+        var dateString = new Date().toString();
+        var match = dateString.match(/(\d\d:\d\d:\d\d).+\((.+)\)/);
+        if (!match || !match[0] || !match[2]) {
+          timestamp = dateString;
+        } else {
+          timestamp = match[1] + ' ' + match[2];
+        }
+
+        feedbackEl.className = 'ae-message';
+        feedbackEl.innerHTML = 'Message has been sent at ' + timestamp;
+      }
+    };
+
+  </script>
+{% endblock %}
+
+{% block body %}
+<div id="inboundmail">
+  <h3>Email</h3>
+  {% if inboundmail_configured %}{% else %}
+    <div class="ae-errorbox">
+      Inbound mail is not yet configured properly in your app.yaml in the services section.
+    </div>
+  {% endif %}
+  <div id="feedback"></div>
+  <form id="inboundmail-form"
+    action="/_ah/mail/" method="post"
+    onsubmit="sendInboundMailWebhook(); return false">
+
+    <input type="hidden" name="payload" id="payload">
+    <input type="hidden" id="content-type" name="header:Content-Type" value="message/rfc822">
+    <input type="hidden" id="content-length" name="header:Content-Length">
+
+    <div class="fieldset">
+      <label for="from">From:</label>
+      <input type="text" id="from" name="from" size="40">
+    </div>
+
+    <div class="fieldset">
+      <label for="to">To:</label>
+      <input type="text" id="to" name="to" size="40">
+    </div>
+
+    <div class="fieldset">
+      <label for="cc">Cc:</label>
+      <input type="text" id="cc" name="cc" size="40">
+    </div>
+
+    <div class="fieldset">
+      <label for="subject">Subject:</label>
+      <input type="text" id="subject" name="subject" size="40">
+    </div>
+
+    <div id="body-c" class="fieldset">
+      <label for="body">Message body (plain text):</label>
+      <textarea id="body" name="body" rows="10" cols="50"></textarea>
+    </div>
+
+    <div id="inboundmail-submit">
+      <input type="submit" value="Send Email">
+    </div>
+
+  </form>
+</div>
+
+{% endblock %}
+
+{% block final %}
+{% endblock %}
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/thirdparty/google_appengine/google/appengine/ext/admin/templates/js/multipart_form_data.js	Mon Sep 07 20:27:37 2009 +0200
@@ -0,0 +1,125 @@
+// Copyright 2009 Google Inc.  All Rights Reserved.
+
+/**
+ * A multipart form data construction class for XHR.
+ * @see http://www.w3.org/Protocols/rfc1341/7_2_Multipart.html
+ * @constructor
+ */
+var MultipartFormData = function() {
+  /**
+   * @type {Array}
+   */
+  this.headers = [];
+
+  /**
+   * @type {Array}
+   */
+  this.parts = [];
+
+  /**
+   * A random string for the boundary.
+   * @type {string}
+   */
+  this.boundary = MultipartFormData.getRandomBoundary();
+};
+
+
+/**
+ * @type {string}
+ */
+MultipartFormData.CRLF = '\r\n';
+
+
+/**
+ * @type {string}
+ * @private
+ */
+MultipartFormData.TEN_CHARS_ = 'DiStRIcT10';
+
+
+/**
+ * Generates a random number and some random characters from it.
+ */
+MultipartFormData.getRandomBoundary = function() {
+  var anyTenCharacters = 'DiStRIcT10';
+  var randomNumber = Math.floor(Math.random() * 10000000);
+  var nums = randomNumber.toString().split('');
+  var randomChars = '';
+  for (var i = 0, num; num = nums[i]; i++) {
+    randomChars += anyTenCharacters[num];
+  }
+  return randomChars + '-' + randomNumber;
+};
+
+
+/**
+ * @param {string} name The name for this header.
+ * @param {string} value The value for this header.
+ */
+MultipartFormData.prototype.addHeader = function(name, value) {
+  this.headers.push({
+    'name': name,
+    'value': value
+  });
+};
+
+
+/**
+ * @param {?string} name The name for this part.
+ * @param {string} value The value for this part.
+ * @param {string} opt_contentType Content-type for this part.
+ * @param {string} opt_contentDisposition Content disposition for this part.
+ * @param {string} opt_filename The filename for this part
+ */
+MultipartFormData.prototype.addPart = function(name, value, opt_contentType,
+    opt_contentDisposition, opt_filename) {
+  var contentType = opt_contentType || null;
+  var contentDisposition = opt_contentDisposition || null;
+  var filename = opt_filename || null;
+  this.parts.push({
+    'name': name,
+    'value': value,
+    'contentType': contentType,
+    'contentDisposition': contentDisposition,
+    'filename': filename
+  });
+};
+
+/**
+ * @return {string} The string to set as a payload.
+ */
+MultipartFormData.prototype.toString = function() {
+  var lines = [];
+
+  for (var i = 0, header; header = this.headers[i]; i++) {
+    lines.push(header['name'] + ': ' + header['value']);
+  }
+  if (this.headers.length > 0) {
+    lines.push('');
+  }
+
+  for (var i = 0, part; part = this.parts[i]; i++) {
+    lines.push('--' + this.boundary);
+
+    if (part['contentDisposition']) {
+      var contentDisposition = 'Content-Disposition: form-data; ';
+      contentDisposition += 'name="' + part['name'] + '"';
+      if (part['filename']) {
+        contentDisposition += '; filename="' + part['filename'] + '"';
+      }
+      lines.push(contentDisposition);
+    }
+
+    if (part['contentType']) {
+      lines.push('Content-Type: ' + part['contentType']);
+    }
+
+    lines.push('');
+    lines.push(part['value']);
+  }
+
+  lines.push('--' + this.boundary + '--');
+
+  return lines.join(MultipartFormData.CRLF) + MultipartFormData.CRLF;
+};
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/thirdparty/google_appengine/google/appengine/ext/admin/templates/js/rfc822_date.js	Mon Sep 07 20:27:37 2009 +0200
@@ -0,0 +1,70 @@
+// Copyright 2009 Google Inc.  All Rights Reserved.
+
+var RFC822Date = {};
+
+/**
+ * Return a DateTime in RFC822 format.
+ * @see http://www.w3.org/Protocols/rfc822/#z28
+ * @param {Date} date A Date object.
+ * @param {string} opt_tzo The timezone offset.
+ */
+RFC822Date.format = function(date, opt_tzo) {
+  var tzo = opt_tzo || RFC822Date.getTZO(date.getTimezoneOffset());
+  var rfc822Date = RFC822Date.DAYS[date.getDay()] + ', ';
+  rfc822Date += RFC822Date.padZero(date.getDate()) + ' ';
+  rfc822Date += RFC822Date.MONTHS[date.getMonth()] + ' ';
+  rfc822Date += date.getFullYear() + ' ';
+  rfc822Date += RFC822Date.padZero(date.getHours()) + ':';
+  rfc822Date += RFC822Date.padZero(date.getMinutes()) + ':';
+  rfc822Date += RFC822Date.padZero(date.getSeconds()) + ' ' ;
+  rfc822Date += tzo;
+  return rfc822Date;
+};
+
+
+/**
+ * @type {Array}
+ */
+RFC822Date.MONTHS = ['Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun',
+                     'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec'];
+
+
+/**
+ * @type {Array}
+ */
+RFC822Date.DAYS = ['Sun', 'Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat'];
+
+
+/**
+ * Pads a value with a 0 if it is less than 10;
+ * @param {number|string}
+ * @return {string}
+ */
+RFC822Date.padZero = function(val) {
+  val = val + ''; // cast into string
+  if (val.length < 2) {
+    val = '0' + val;
+  }
+  return val;
+};
+
+
+/**
+ * Returns a timezone offset in the format +|-dddd.
+ * @param {String} tzo A time zone offset from GMT in minutes.
+ * @return {string} The time zone offset as a string.
+ */
+RFC822Date.getTZO = function(tzo) {
+  var hours = Math.floor(tzo / 60);
+  var tzoFormatted = hours > 0 ? '-' : '+';
+
+  var absoluteHours = Math.abs(hours);
+  tzoFormatted += absoluteHours < 10 ? '0' : '';
+  tzoFormatted += absoluteHours;
+
+  var moduloMinutes = Math.abs(tzo % 60);
+  tzoFormatted += RFC822Date.padZero(moduloMinutes);
+
+  return tzoFormatted;
+};
+
--- a/thirdparty/google_appengine/google/appengine/ext/admin/templates/js/webhook.js	Mon Sep 07 20:26:39 2009 +0200
+++ b/thirdparty/google_appengine/google/appengine/ext/admin/templates/js/webhook.js	Mon Sep 07 20:27:37 2009 +0200
@@ -33,7 +33,7 @@
       this.payload = value;
     }
   }
-  
+
   if (this.action == '') {
     return 'action not found';
   }
@@ -61,7 +61,20 @@
     callback(this, req, e);
     return;
   }
-  callback(this, req, null);
+
+  // If the responseText matches our <form action="/_ah/login then the
+  // user is not logged in as an Administrator so we'll fake the request.
+  if (req.responseText.match(/<form[^>]+_ah\/login/)) {
+    var fakeReq = {
+      'status': 403,
+      'responseText': 'Current logged in user is not authorized ' +
+                      'to view this page'
+    }
+    fakeReq.getAllResponseHeaders = function(){};
+    callback(this, fakeReq, null);
+  } else {
+    callback(this, req, null);
+  }
 };
 
 Webhook.prototype.run = function(callback) {
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/thirdparty/google_appengine/google/appengine/ext/admin/templates/xmpp.html	Mon Sep 07 20:27:37 2009 +0200
@@ -0,0 +1,234 @@
+{% extends "base.html" %}
+
+{% block title %}{{ application_name }} Development Console - XMPP{% endblock %}
+
+{% block breadcrumbs %}
+  <span class="item"><a href="">XMPP</a></span>
+{% endblock %}
+
+{% block head %}
+  <style type="text/css">{% include "css/xmpp.css" %}</style>
+  <script type="text/javascript">
+    {% include "js/webhook.js" %}
+    {% include "js/multipart_form_data.js" %}
+
+    var xmppFeedbackEl;
+    var xmppForm;
+    var payloadEl;
+    var fromEl;
+    var toEl;
+    var chatEl;
+    var contentLengthEl;
+    var contentTypeEl;
+
+    var sendXmppWebhook = function() {
+
+      if (!xmppFeedbackEl) {
+        xmppFeedbackEl = document.getElementById('xmpp-feedback');
+        xmppForm = document.getElementById('xmpp-form');
+        fromEl = document.getElementById('from');
+        toEl = document.getElementById('to');
+        chatEl = document.getElementById('chat');
+        payloadEl = document.getElementById('payload');
+        contentTypeEl = document.getElementById('content-type');
+      }
+
+      var to = toEl.value;
+      var from = fromEl.value;
+      var body = chatEl.value;
+
+      if (!to || !from) {
+        xmppFeedbackEl.className = 'ae-errorbox';
+        xmppFeedbackEl.innerHTML = 'From and To are required.';
+        return;
+      }
+
+      xmppFeedbackEl.className = 'ae-message';
+      xmppFeedbackEl.innerHTML = 'Sending XMPP message...';
+
+      var formData = new MultipartFormData();
+      formData.addPart('to', to, null, 'form-data');
+      formData.addPart('from', from, null, 'form-data');
+      formData.addPart('body', body, null, 'form-data');
+      formData.addPart('stanza', buildXmlStanza(from, to, body), 'text/xml', 'form-data');
+
+      payloadEl.value = formData.toString();
+      contentTypeEl.value = 'multipart/form-data; boundary=' +
+          formData.boundary;
+
+      (new Webhook('xmpp-form')).run(handleXmppResult);
+
+      // Prevents actual form posts.
+      return false;
+    };
+
+    var handleXmppResult = function(hook, req, error) {
+      if (error != null || req == null || req.status != 200) {
+        xmppFeedbackEl.className = 'ae-errorbox';
+        xmppFeedbackEl.innerHTML = 'Message send failure<br>' +
+            req.responseText;
+      } else {
+        var timestamp;
+        var dateString = new Date().toString();
+        var match = dateString.match(/(\d\d:\d\d:\d\d).+\((.+)\)/);
+        if (!match || !match[0] || !match[2]) {
+          timestamp = dateString;
+        } else {
+          timestamp = match[1] + ' ' + match[2];
+        }
+
+        xmppFeedbackEl.className = 'ae-message';
+        xmppFeedbackEl.innerHTML = 'Message has been sent at ' + timestamp;
+      }
+    };
+
+    var buildXmlStanza = function(from, to, body) {
+      var xml = '<message from="' + from + '" '+
+          'to="' + to + '">' +
+          '<body>' + body + '</body>' +
+          '</message>';
+      return xml;
+    };
+  </script>
+{% endblock %}
+
+{% block body %}
+<div id="xmpp">
+  <h3>XMPP</h3>
+  {% if xmpp_configured %}{% else %}
+    <div class="ae-errorbox">
+      XMPP is not yet configured properly in your app.yaml, in the services section.
+    </div>
+  {% endif %}
+  <div id="xmpp-feedback"></div>
+  <form id="xmpp-form"
+    action="/_ah/xmpp/message/chat/" method="post"
+    onsubmit="sendXmppWebhook(); return false">
+
+    <input type="hidden" name="payload" id="payload">
+    <input type="hidden" id="content-type" name="header:Content-Type">
+
+    <fieldset>
+      <input type="hidden" name="message_type" id="message-type-chat" value="chat">
+      <!--
+      <legend>Message Type:</legend>
+      <div class="radio">
+        <input type="radio" name="message_type" id="message-type-chat" value="chat">
+        <label for="message-type-chat">Chat message</label>
+      </div>
+
+      <div class="radio">
+        <input type="radio" name="message_type" id="message-type-xml" value="xml">
+        <label for="message-type-xml">XML stanza</label>
+      </div>
+
+      <div class="radio">
+        <input type="radio" name="message_type" id="message-type-presence" value="presence">
+        <label for="message-type-presence">Presence</label>
+      </div>
+      -->
+    </fieldset>
+
+    <div class="fieldset">
+      <label for="from">From:</label>
+      <input type="text" id="from" name="from" size="40">
+    </div>
+
+
+    <div class="fieldset">
+      <label for="to">To:</label>
+      <input type="text" id="to" name="to" size="40">
+    </div>
+
+
+    <div id="chat-c" class="fieldset">
+      <label for="chat">Chat (plain text):</label>
+      <textarea id="chat" name="chat" rows="10" cols="50"></textarea>
+    </div>
+
+    <!--
+    <div id="xml-c" class="fieldset">
+      <label for="xml">XML Stanza:</label>
+      <textarea id="xml" name="xml" rows="10" cols="50"></textarea>
+    </div>
+
+
+    <fieldset id="presence-c">
+      <legend>Presence:</legend>
+
+      <div class="radio">
+        <input type="radio" id="presence-online" name="presence" value="online">
+        <label for="presence-online">Online</label>
+      </div>
+
+      <div class="radio">
+        <input type="radio" id="presence-offline" name="presence" value="offline">
+        <label for="presence-offline">Offline</label>
+      </div>
+    </div>
+    -->
+
+    <div id="xmpp-submit">
+      <input type="submit" value="Send Message">
+    </div>
+
+  </form>
+</div>
+<!--
+<script type="text/javascript">
+  var messageTypes = ['chat', 'xml', 'presence'];
+
+  var messageTypeEls = [];
+  for (var i = 0, messageType; messageType = messageTypes[i]; i++) {
+    var messageTypeEl = document.getElementById('message-type-' +
+        messageType);
+    messageTypeEls.push(messageTypeEl);
+  }
+
+  // Initializes the chosen type to be the first radio.
+  var chosenMessageTypeId = messageTypeEls[0].id;
+
+  var messageTypeDict = {};
+  for (var i = 0, messageTypeEl; messageTypeEl = messageTypeEls[i]; i++) {
+    var type = messageTypeEl.id.replace('message-type-', '');
+    var formEl = document.getElementById(type + '-c');
+    messageTypeDict[messageTypeEl.id] = formEl;
+    // Initially hides all of the conditional form elements.
+    formEl.style.display = 'none';
+  }
+
+  var setChosenMessageType = function(messageTypeId) {
+    document.getElementById(messageTypeId).checked = true;
+
+    // Hides previously chosen message type
+    messageTypeDict[chosenMessageTypeId].style.display = 'none';
+
+    // Sets the new chosen type and shows its field.
+    chosenMessageTypeId = messageTypeId;
+    messageTypeDict[chosenMessageTypeId].style.display = '';
+  }
+
+  var messageTypeClickHandler = function(e) {
+    for (var i = 0, messageTypeEl; messageTypeEl = messageTypeEls[i]; i++) {
+      if (messageTypeEl.checked) {
+        setChosenMessageType(messageTypeEl.id);
+        break;
+      }
+    }
+  };
+
+  // set up event listeners
+  for (var i = 0, messageTypeEl; messageTypeEl = messageTypeEls[i]; i++) {
+    messageTypeEl.onclick = messageTypeClickHandler;
+  }
+
+  // Init
+  setChosenMessageType(chosenMessageTypeId);
+
+</script>
+-->
+
+{% endblock %}
+
+{% block final %}
+{% endblock %}
--- a/thirdparty/google_appengine/google/appengine/ext/db/__init__.py	Mon Sep 07 20:26:39 2009 +0200
+++ b/thirdparty/google_appengine/google/appengine/ext/db/__init__.py	Mon Sep 07 20:27:37 2009 +0200
@@ -122,6 +122,7 @@
 Text = datastore_types.Text
 Blob = datastore_types.Blob
 ByteString = datastore_types.ByteString
+BlobKey = datastore_types.BlobKey
 
 _kind_map = {}
 
@@ -186,6 +187,7 @@
     PhoneNumber,
     PostalAddress,
     Rating,
+    BlobKey,
     ])
 
 _ALLOWED_EXPANDO_PROPERTY_TYPES = set(_ALLOWED_PROPERTY_TYPES)
@@ -241,6 +243,49 @@
         "definition." % locals())
 
 
+def query_descendants(model_instance):
+  """Returns a query for all the descendants of a model instance.
+
+  Args:
+    model_instance: Model instance to find the descendants of.
+
+  Returns:
+    Query that will retrieve all entities that have the given model instance
+  as an ancestor. Unlike normal ancestor queries, this does not include the
+  ancestor itself.
+  """
+
+  result = Query().ancestor(model_instance);
+  result.filter(datastore_types._KEY_SPECIAL_PROPERTY + ' >',
+                model_instance.key());
+  return result;
+
+
+def model_to_protobuf(model_instance, _entity_class=datastore.Entity):
+  """Encodes a model instance as a protocol buffer.
+
+  Args:
+    model_instance: Model instance to encode.
+  Returns:
+    entity_pb.EntityProto representation of the model instance
+  """
+  return model_instance._populate_entity(_entity_class).ToPb()
+
+
+def model_from_protobuf(pb, _entity_class=datastore.Entity):
+  """Decodes a model instance from a protocol buffer.
+
+  Args:
+    pb: The protocol buffer representation of the model instance. Can be an
+        entity_pb.EntityProto or str encoding of an entity_pb.EntityProto
+
+  Returns:
+    Model instance resulting from decoding the protocol buffer
+  """
+  entity = _entity_class.FromPb(pb)
+  return class_for_kind(entity.kind()).from_entity(entity)
+
+
 def _initialize_properties(model_class, name, bases, dct):
   """Initialize Property attributes for Model-class.
 
@@ -248,17 +293,31 @@
     model_class: Model class to initialize properties for.
   """
   model_class._properties = {}
+  property_source = {}
+
+  def get_attr_source(name, cls):
+    for src_cls in cls.mro():
+      if name in src_cls.__dict__:
+        return src_cls
+
   defined = set()
   for base in bases:
     if hasattr(base, '_properties'):
-      property_keys = base._properties.keys()
-      duplicate_properties = defined.intersection(property_keys)
-      if duplicate_properties:
-        raise DuplicatePropertyError(
-            'Duplicate properties in base class %s already defined: %s' %
-            (base.__name__, list(duplicate_properties)))
-      defined.update(property_keys)
-      model_class._properties.update(base._properties)
+      property_keys = set(base._properties.keys())
+      duplicate_property_keys = defined & property_keys
+      for dupe_prop_name in duplicate_property_keys:
+        old_source = property_source[dupe_prop_name] = get_attr_source(
+            dupe_prop_name, property_source[dupe_prop_name])
+        new_source = get_attr_source(dupe_prop_name, base)
+        if old_source != new_source:
+          raise DuplicatePropertyError(
+              'Duplicate property, %s, is inherited from both %s and %s.' %
+              (dupe_prop_name, old_source.__name__, new_source.__name__))
+      property_keys -= duplicate_property_keys
+      if property_keys:
+        defined |= property_keys
+        property_source.update(dict.fromkeys(property_keys, base))
+        model_class._properties.update(base._properties)
 
   for attr_name in dct.keys():
     attr = dct[attr_name]
@@ -557,6 +616,7 @@
   def __init__(self,
                parent=None,
                key_name=None,
+               key=None,
                _app=None,
                _from_entity=False,
                **kwds):
@@ -582,38 +642,64 @@
       parent: Parent instance for this instance or None, indicating a top-
         level instance.
       key_name: Name for new model instance.
-      _app: Intentionally undocumented.
+      key: Key instance for this instance, overrides parent and key_name
       _from_entity: Intentionally undocumented.
       args: Keyword arguments mapping to properties of model.
     """
-    if key_name == '':
-      raise BadKeyError('Name cannot be empty.')
-    elif key_name is not None and not isinstance(key_name, basestring):
-      raise BadKeyError('Name must be string type, not %s' %
-                        key_name.__class__.__name__)
-
-    if parent is not None:
-      if not isinstance(parent, (Model, Key)):
-        raise TypeError('Expected Model type; received %s (is %s)' %
-                        (parent, parent.__class__.__name__))
-      if isinstance(parent, Model) and not parent.has_key():
-        raise BadValueError(
-            "%s instance must have a complete key before it can be used as a "
-            "parent." % parent.kind())
-      if isinstance(parent, Key):
-        self._parent_key = parent
+    if key is not None:
+      if isinstance(key, (tuple, list)):
+        key = Key.from_path(*key)
+      if isinstance(key, basestring):
+        key = Key(encoded=key)
+      if not isinstance(key, Key):
+        raise TypeError('Expected Key type; received %s (is %s)' %
+                        (key, key.__class__.__name__))
+      if not key.has_id_or_name():
+        raise BadKeyError('Key must have an id or name')
+      if key.kind() != self.kind():
+        raise BadKeyError('Expected Key kind to be %s; received %s' %
+                          (self.kind(), key.kind()))
+      if _app is not None and key.app() != _app:
+        raise BadKeyError('Expected Key app to be %s; received %s' %
+                          (_app, key.app()))
+      if key_name is not None:
+        raise BadArgumentError('Cannot use key and key_name at the same time')
+      if parent is not None:
+        raise BadArgumentError('Cannot use key and parent at the same time')
+      self._key = key
+      self._key_name = None
+      self._parent = None
+      self._parent_key = None
+    else:
+      if key_name == '':
+        raise BadKeyError('Name cannot be empty.')
+      elif key_name is not None and not isinstance(key_name, basestring):
+        raise BadKeyError('Name must be string type, not %s' %
+                          key_name.__class__.__name__)
+
+      if parent is not None:
+        if not isinstance(parent, (Model, Key)):
+          raise TypeError('Expected Model type; received %s (is %s)' %
+                          (parent, parent.__class__.__name__))
+        if isinstance(parent, Model) and not parent.has_key():
+          raise BadValueError(
+              "%s instance must have a complete key before it can be used as a "
+              "parent." % parent.kind())
+        if isinstance(parent, Key):
+          self._parent_key = parent
+          self._parent = None
+        else:
+          self._parent_key = parent.key()
+          self._parent = parent
+      else:
+        self._parent_key = None
         self._parent = None
-      else:
-        self._parent_key = parent.key()
-        self._parent = parent
-    else:
-      self._parent_key = None
-      self._parent = None
+      self._key_name = key_name
+      self._key = None
+
     self._entity = None
-    self._key_name = key_name
     self._app = _app
 
-    properties = self.properties()
     for prop in self.properties().values():
       if prop.name in kwds:
         value = kwds[prop.name]
@@ -629,8 +715,9 @@
     """Unique key for this entity.
 
     This property is only available if this entity is already stored in the
-    datastore, so it is available if this entity was fetched returned from a
-    query, or after put() is called the first time for new entities.
+    datastore or if it has a full key, so it is available if this entity was
+    returned from a query, or after put() is called the first time
+    for new entities, or if a complete key was given when constructed.
 
     Returns:
       Datastore key of persisted entity.
@@ -640,13 +727,12 @@
     """
     if self.is_saved():
       return self._entity.key()
+    elif self._key:
+      return self._key
     elif self._key_name:
-      if self._parent_key:
-        parent_key = self._parent_key
-      elif self._parent:
-          parent_key = self._parent.key()
       parent = self._parent_key or (self._parent and self._parent.key())
-      return Key.from_path(self.kind(), self._key_name, parent=parent)
+      self._key = Key.from_path(self.kind(), self._key_name, parent=parent)
+      return self._key
     else:
       raise NotSavedError()
 
@@ -675,8 +761,11 @@
       Populated self._entity
     """
     self._entity = self._populate_entity(_entity_class=_entity_class)
-    if hasattr(self, '_key_name'):
-      del self._key_name
+    for attr in ('_key_name', '_key'):
+      try:
+        delattr(self, attr)
+      except AttributeError:
+        pass
     return self._entity
 
   def put(self):
@@ -713,13 +802,21 @@
       entity = self._entity
     else:
       kwds = {'_app': self._app,
-              'name': self._key_name,
               'unindexed_properties': self._unindexed_properties}
-
-      if self._parent_key is not None:
-        kwds['parent'] = self._parent_key
-      elif self._parent is not None:
-        kwds['parent'] = self._parent._entity
+      if self._key is not None:
+        if self._key.id():
+          kwds['id'] = self._key.id()
+        else:
+          kwds['name'] = self._key.name()
+        if self._key.parent():
+          kwds['parent'] = self._key.parent()
+      else:
+        if self._key_name is not None:
+          kwds['name'] = self._key_name
+        if self._parent_key is not None:
+          kwds['parent'] = self._parent_key
+        elif self._parent is not None:
+          kwds['parent'] = self._parent._entity
       entity = _entity_class(self.kind(), **kwds)
 
     self._to_entity(entity)
@@ -749,14 +846,15 @@
   def has_key(self):
     """Determine if this model instance has a complete key.
 
-    Ids are not assigned until the data is saved to the Datastore, but
-    instances with a key name always have a full key.
+    When not using a fully self-assigned Key, ids are not assigned until the
+    data is saved to the Datastore, but instances with a key name always have
+    a full key.
 
     Returns:
-      True if the object has been persisted to the datastore or has a key_name,
-      otherwise False.
+      True if the object has been persisted to the datastore or has a key
+      or has a key_name, otherwise False.
     """
-    return self.is_saved() or self._key_name
+    return self.is_saved() or self._key or self._key_name
 
   def dynamic_properties(self):
     """Returns a list of all dynamic properties defined for instance."""
@@ -794,6 +892,8 @@
       return self._parent.key()
     elif self._entity is not None:
       return self._entity.parent()
+    elif self._key is not None:
+      return self._key.parent()
     else:
       return None
 
@@ -1017,8 +1117,12 @@
 
     entity_values = cls._load_entity_values(entity)
     instance = cls(None, _from_entity=True, **entity_values)
-    instance._entity = entity
-    del instance._key_name
+    if entity.is_saved():
+      instance._entity = entity
+      del instance._key_name
+      del instance._key
+    elif entity.key().has_id_or_name():
+      instance._key = entity.key()
     return instance
 
   @classmethod
@@ -1126,6 +1230,33 @@
     keys.append(key)
   datastore.Delete(keys)
 
+def allocate_ids(model, size):
+  """Allocates a range of IDs of size for the model_key defined by model
+
+  Allocates a range of IDs in the datastore such that those IDs will not
+  be automatically assigned to new entities. You can only allocate IDs
+  for model keys from your app. If there is an error, raises a subclass of
+  datastore_errors.Error.
+
+  Args:
+    model: Model, Key or string to serve as a model specifying the ID sequence
+           in which to allocate IDs
+
+  Returns:
+    (start, end) of the allocated range, inclusive.
+  """
+  models_or_keys, multiple = datastore.NormalizeAndTypeCheck(
+      model, (Model, Key, basestring))
+  keys = []
+  for model_or_key in models_or_keys:
+    if isinstance(model_or_key, Model):
+      key = model_or_key = model_or_key.key()
+    elif isinstance(model_or_key, basestring):
+      key = model_or_key = Key(model_or_key)
+    else:
+      key = model_or_key
+    keys.append(key)
+  return datastore.AllocateIds(keys, size)
 
 class Expando(Model):
   """Dynamically expandable model.
@@ -1322,7 +1453,7 @@
 class _BaseQuery(object):
   """Base class for both Query and GqlQuery."""
 
-  def __init__(self, model_class, keys_only=False):
+  def __init__(self, model_class=None, keys_only=False):
     """Constructor.
 
     Args:
@@ -1428,7 +1559,10 @@
     if self._keys_only:
       return raw
     else:
-      return [self._model_class.from_entity(e) for e in raw]
+      if self._model_class is not None:
+        return [self._model_class.from_entity(e) for e in raw]
+      else:
+        return [class_for_kind(e.kind()).from_entity(e) for e in raw]
 
   def __getitem__(self, arg):
     """Support for query[index] and query[start:stop].
@@ -1505,7 +1639,11 @@
     Raises:
       StopIteration when there are no more results in query.
     """
-    return self.__model_class.from_entity(self.__iterator.next())
+    if self.__model_class is not None:
+      return self.__model_class.from_entity(self.__iterator.next())
+    else:
+      entity = self.__iterator.next()
+      return class_for_kind(entity.kind()).from_entity(entity)
 
 
 def _normalize_query_parameter(value):
@@ -1569,7 +1707,7 @@
        print story.title
   """
 
-  def __init__(self, model_class, keys_only=False):
+  def __init__(self, model_class=None, keys_only=False):
     """Constructs a query over instances of the given Model.
 
     Args:
@@ -1586,7 +1724,11 @@
                  _multi_query_class=datastore.MultiQuery):
     queries = []
     for query_set in self.__query_sets:
-      query = _query_class(self._model_class.kind(),
+      if self._model_class is not None:
+        kind = self._model_class.kind()
+      else:
+        kind = None
+      query = _query_class(kind,
                            query_set,
                            keys_only=self._keys_only)
       query.Order(*self.__orderings)
@@ -1665,7 +1807,12 @@
     else:
       operator = '=='
 
-    if prop in self._model_class._unindexed_properties:
+    if self._model_class is None:
+      if prop != datastore_types._KEY_SPECIAL_PROPERTY:
+        raise BadQueryError(
+            'Only %s filters are allowed on kindless queries.' %
+            datastore_types._KEY_SPECIAL_PROPERTY)
+    elif prop in self._model_class._unindexed_properties:
       raise PropertyError('Property \'%s\' is not indexed' % prop)
 
     if operator.lower() == 'in':
@@ -1711,13 +1858,20 @@
     else:
       order = datastore.Query.ASCENDING
 
-    if not issubclass(self._model_class, Expando):
-      if (property not in self._model_class.properties() and
-          property not in datastore_types._SPECIAL_PROPERTIES):
-        raise PropertyError('Invalid property name \'%s\'' % property)
-
-    if property in self._model_class._unindexed_properties:
-      raise PropertyError('Property \'%s\' is not indexed' % property)
+    if self._model_class is None:
+      if (property != datastore_types._KEY_SPECIAL_PROPERTY or
+          order != datastore.Query.ASCENDING):
+        raise BadQueryError(
+            'Only %s ascending orders are supported on kindless queries' %
+            datastore_types._KEY_SPECIAL_PROPERTY)
+    else:
+      if not issubclass(self._model_class, Expando):
+        if (property not in self._model_class.properties() and
+            property not in datastore_types._SPECIAL_PROPERTIES):
+          raise PropertyError('Invalid property name \'%s\'' % property)
+
+      if property in self._model_class._unindexed_properties:
+        raise PropertyError('Property \'%s\' is not indexed' % property)
 
     self.__orderings.append((property, order))
     return self
@@ -1774,14 +1928,18 @@
     app = kwds.pop('_app', None)
 
     self._proto_query = gql.GQL(query_string, _app=app)
-    model_class = class_for_kind(self._proto_query._entity)
+    if self._proto_query._entity is not None:
+      model_class = class_for_kind(self._proto_query._entity)
+    else:
+      model_class = None
     super(GqlQuery, self).__init__(model_class,
                                    keys_only=self._proto_query._keys_only)
 
-    for property, unused in (self._proto_query.filters().keys() +
-                             self._proto_query.orderings()):
-      if property in model_class._unindexed_properties:
-        raise PropertyError('Property \'%s\' is not indexed' % property)
+    if model_class is not None:
+      for property, unused in (self._proto_query.filters().keys() +
+                               self._proto_query.orderings()):
+        if property in model_class._unindexed_properties:
+          raise PropertyError('Property \'%s\' is not indexed' % property)
 
     self.bind(*args, **kwds)
 
@@ -2404,7 +2562,6 @@
   data_type = users.User
 
 
-
 class ListProperty(Property):
   """A property that stores a list of things.
 
--- a/thirdparty/google_appengine/google/appengine/ext/db/polymodel.py	Mon Sep 07 20:26:39 2009 +0200
+++ b/thirdparty/google_appengine/google/appengine/ext/db/polymodel.py	Mon Sep 07 20:27:37 2009 +0200
@@ -87,9 +87,12 @@
         itself so that it subclasses can quickly know what the root of
         their hierarchy is and what kind they are stored in.
       __class_hierarchy__: List of classes describing the new model's place
-        in the class hierarchy.  The first element is always the root
-        element while the last element is the new class itself.  For example:
+        in the class hierarchy in reverse MRO order.  The first element is
+        always the root class while the last element is always the new class.
 
+        MRO documentation: http://www.python.org/download/releases/2.3/mro/
+
+        For example:
           class Foo(PolymorphicClass): ...
 
           class Bar(Foo): ...
@@ -107,30 +110,29 @@
     discriminator (the 'class' property of the entity) when loading from the
     datastore.
     """
-    if name == 'PolyModel' or PolyModel not in bases:
-      db._initialize_properties(cls, name, bases, dct)
-      super(db.PropertiedClass, cls).__init__(name, bases, dct)
-    else:
-      cls.__root_class__ = cls
-      super(PolymorphicClass, cls).__init__(name, bases, dct)
-
     if name == 'PolyModel':
+      super(PolymorphicClass, cls).__init__(name, bases, dct, map_kind=False)
       return
 
-    if cls is not cls.__root_class__:
-      poly_class = None
-      for base in cls.__bases__:
-        if issubclass(base, PolyModel):
-          poly_class = base
-          break
-      else:
+    elif PolyModel in bases:
+      if getattr(cls, '__class_hierarchy__', None):
+        raise db.ConfigurationError(('%s cannot derive from PolyModel as '
+            '__class_hierarchy__ is already defined.') % cls.__name__)
+      cls.__class_hierarchy__ = [cls]
+      cls.__root_class__ = cls
+      super(PolymorphicClass, cls).__init__(name, bases, dct)
+    else:
+      super(PolymorphicClass, cls).__init__(name, bases, dct, map_kind=False)
+
+      cls.__class_hierarchy__ = [c for c in reversed(cls.mro())
+          if issubclass(c, PolyModel) and c != PolyModel]
+
+      if cls.__class_hierarchy__[0] != cls.__root_class__:
         raise db.ConfigurationError(
-            "Polymorphic class '%s' does not inherit from PolyModel."
-            % cls.__name__)
-
-      cls.__class_hierarchy__ = poly_class.__class_hierarchy__ + [cls]
-    else:
-      cls.__class_hierarchy__ = [cls]
+            '%s cannot be derived from both root classes %s and %s' %
+            (cls.__name__,
+            cls.__class_hierarchy__[0].__name__,
+            cls.__root_class__.__name__))
 
     _class_map[cls.class_key()] = cls
 
@@ -310,13 +312,16 @@
     return super(PolyModel, cls).from_entity(entity)
 
   @classmethod
-  def all(cls):
+  def all(cls, **kwds):
     """Get all instance of a class hierarchy.
 
+    Args:
+      kwds: Keyword parameters passed on to Model.all.
+
     Returns:
       Query with filter set to match this class' discriminator.
     """
-    query = super(PolyModel, cls).all()
+    query = super(PolyModel, cls).all(**kwds)
     if cls != cls.__root_class__:
       query.filter(_CLASS_KEY_PROPERTY + ' =', cls.class_name())
     return query
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/thirdparty/google_appengine/google/appengine/ext/deferred/__init__.py	Mon Sep 07 20:27:37 2009 +0200
@@ -0,0 +1,22 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+
+
+
+
+from deferred import *
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/thirdparty/google_appengine/google/appengine/ext/deferred/deferred.py	Mon Sep 07 20:27:37 2009 +0200
@@ -0,0 +1,267 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""A module that handles deferred execution of callables via the task queue.
+
+Tasks consist of a callable and arguments to pass to it. The callable and its
+arguments are serialized and put on the task queue, which deserializes and
+executes them. The following callables can be used as tasks:
+
+1) Functions defined in the top level of a module
+2) Classes defined in the top level of a module
+3) Instances of classes in (2) that implement __call__
+4) Instance methods of objects of classes in (2)
+5) Class methods of classes in (2)
+6) Built-in functions
+7) Built-in methods
+
+The following callables can NOT be used as tasks:
+1) Nested functions or closures
+2) Nested classes or objects of them
+3) Lambda functions
+4) Static methods
+
+The arguments to the callable, and the object (in the case of method or object
+calls) must all be pickleable.
+
+If you want your tasks to execute reliably, don't use mutable global variables;
+they are not serialized with the task and may not be the same when your task
+executes as they were when it was enqueued (in fact, they will almost certainly
+be different).
+
+If your app relies on manipulating the import path, make sure that the function
+you are deferring is defined in a module that can be found without import path
+manipulation. Alternately, you can include deferred.TaskHandler in your own
+webapp application instead of using the easy-install method detailed below.
+
+When you create a deferred task using deferred.defer, the task is serialized,
+and an attempt is made to add it directly to the task queue. If the task is too
+big (larger than about 10 kilobytes when serialized), a datastore entry will be
+created for the task, and a new task will be enqueued, which will fetch the
+original task from the datastore and execute it. This is much less efficient
+than the direct execution model, so it's a good idea to minimize the size of
+your tasks when possible.
+
+In order for tasks to be processed, you need to set up the handler. Add the
+following to your app.yaml handlers section:
+
+handlers:
+- url: /_ah/queue/deferred
+  script: $PYTHON_LIB/google/appengine/ext/deferred/__init__.py
+  login: admin
+
+By default, the deferred module uses the URL above, and the default queue.
+
+Example usage:
+
+  def do_something_later(key, amount):
+    entity = MyModel.get(key)
+    entity.total += amount
+    entity.put()
+
+  # Use default URL and queue name, no task name, execute ASAP.
+  deferred.defer(do_something_later, 20)
+
+  # Providing non-default task queue arguments
+  deferred.defer(do_something_later, 20, _queue="foo", countdown=60)
+"""
+
+
+
+
+
+import logging
+import pickle
+import types
+
+from google.appengine.api.labs import taskqueue
+from google.appengine.ext import db
+from google.appengine.ext import webapp
+from google.appengine.ext.webapp.util import run_wsgi_app
+
+
+_TASKQUEUE_HEADERS = {"Content-Type": "application/octet-stream"}
+_DEFAULT_URL = "/_ah/queue/deferred"
+_DEFAULT_QUEUE = "default"
+
+
+class Error(Exception):
+  """Base class for exceptions in this module."""
+
+
+class PermanentTaskFailure(Error):
+  """Indicates that a task failed, and will never succeed."""
+
+
+def run(data):
+  """Unpickles and executes a task.
+
+  Args:
+    data: A pickled tuple of (function, args, kwargs) to execute.
+  Returns:
+    The return value of the function invocation.
+  """
+  try:
+    func, args, kwds = pickle.loads(data)
+  except Exception, e:
+    raise PermanentTaskFailure(e)
+  else:
+    return func(*args, **kwds)
+
+
+class _DeferredTaskEntity(db.Model):
+  """Datastore representation of a deferred task.
+
+  This is used in cases when the deferred task is too big to be included as
+  payload with the task queue entry.
+  """
+  data = db.BlobProperty(required=True)
+
+
+def run_from_datastore(key):
+  """Retrieves a task from the datastore and executes it.
+
+  Args:
+    key: The datastore key of a _DeferredTaskEntity storing the task.
+  Returns:
+    The return value of the function invocation.
+  """
+  entity = _DeferredTaskEntity.get(key)
+  if not entity:
+    raise PermanentTaskFailure()
+  try:
+    ret = run(entity.data)
+    entity.delete()
+  except PermanentTaskFailure:
+    entity.delete()
+    raise
+
+
+def invoke_member(obj, membername, *args, **kwargs):
+  """Retrieves a member of an object, then calls it with the provided arguments.
+
+  Args:
+    obj: The object to operate on.
+    membername: The name of the member to retrieve from obj.
+    args: Positional arguments to pass to the method.
+    kwargs: Keyword arguments to pass to the method.
+  Returns:
+    The return value of the method invocation.
+  """
+  return getattr(obj, membername)(*args, **kwargs)
+
+
+def _curry_callable(obj, *args, **kwargs):
+  """Takes a callable and arguments and returns a task queue tuple.
+
+  The returned tuple consists of (callable, args, kwargs), and can be pickled
+  and unpickled safely.
+
+  Args:
+    obj: The callable to curry. See the module docstring for restrictions.
+    args: Positional arguments to call the callable with.
+    kwargs: Keyword arguments to call the callable with.
+  Returns:
+    A tuple consisting of (callable, args, kwargs) that can be evaluated by
+    run() with equivalent effect of executing the function directly.
+  Raises:
+    ValueError: If the passed in object is not of a valid callable type.
+  """
+  if isinstance(obj, types.MethodType):
+    return (invoke_member, (obj.im_self, obj.im_func.__name__) + args, kwargs)
+  elif isinstance(obj, types.BuiltinMethodType):
+    if not obj.__self__:
+      return (obj, args, kwargs)
+    else:
+      return (invoke_member, (obj.__self__, obj.__name__) + args, kwargs)
+  elif isinstance(obj, types.ObjectType) and hasattr(obj, "__call__"):
+    return (obj, args, kwargs)
+  elif isinstance(obj, (types.FunctionType, types.BuiltinFunctionType,
+                        types.ClassType, types.UnboundMethodType)):
+    return (obj, args, kwargs)
+  else:
+    raise ValueError("obj must be callable")
+
+
+def serialize(obj, *args, **kwargs):
+  """Serializes a callable into a format recognized by the deferred executor.
+
+  Args:
+    obj: The callable to serialize. See module docstring for restrictions.
+    args: Positional arguments to call the callable with.
+    kwargs: Keyword arguments to call the callable with.
+  Returns:
+    A serialized representation of the callable.
+  """
+  curried = _curry_callable(obj, *args, **kwargs)
+  return pickle.dumps(curried, protocol=pickle.HIGHEST_PROTOCOL)
+
+
+def defer(obj, *args, **kwargs):
+  """Defers a callable for execution later.
+
+  The default deferred URL of /_ah/queue/deferred will be used unless an
+  alternate URL is explicitly specified. If you want to use the default URL for
+  a queue, specify _url=None. If you specify a different URL, you will need to
+  install the handler on that URL (see the module docstring for details).
+
+  Args:
+    obj: The callable to execute. See module docstring for restrictions.
+    _countdown, _eta, _name, _url, _queue: Passed through to the task queue -
+      see the task queue documentation for details.
+    args: Positional arguments to call the callable with.
+    kwargs: Any other keyword arguments are passed through to the callable.
+  """
+  taskargs = dict((x, kwargs.pop(("_%s" % x), None))
+                  for x in ("countdown", "eta", "name"))
+  taskargs["url"] = kwargs.pop("_url", _DEFAULT_URL)
+  taskargs["headers"] = _TASKQUEUE_HEADERS
+  queue = kwargs.pop("_queue", _DEFAULT_QUEUE)
+  pickled = serialize(obj, *args, **kwargs)
+  try:
+    task = taskqueue.Task(payload=pickled, **taskargs)
+    task.add(queue)
+  except taskqueue.TaskTooLargeError:
+    key = _DeferredTaskEntity(data=pickled).put()
+    pickled = serialize(run_from_datastore, str(key))
+    task = taskqueue.Task(payload=pickled, **taskargs)
+    task.add(queue)
+
+
+class TaskHandler(webapp.RequestHandler):
+  """A webapp handler class that processes deferred invocations."""
+
+  def post(self):
+    headers = ["%s:%s" % (k, v) for k, v in self.request.headers.items()
+               if k.lower().startswith("x-appengine-")]
+    logging.info(", ".join(headers))
+
+    try:
+      run(self.request.body)
+    except PermanentTaskFailure, e:
+      logging.exception("Permanent failure attempting to execute task")
+
+
+application = webapp.WSGIApplication([(".*", TaskHandler)])
+
+
+def main():
+  run_wsgi_app(application)
+
+
+if __name__ == "__main__":
+  main()
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/thirdparty/google_appengine/google/appengine/ext/ereporter/__init__.py	Mon Sep 07 20:27:37 2009 +0200
@@ -0,0 +1,18 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+from ereporter import *
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/thirdparty/google_appengine/google/appengine/ext/ereporter/ereporter.py	Mon Sep 07 20:27:37 2009 +0200
@@ -0,0 +1,261 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""A logging handler that records information about unique exceptions.
+
+'Unique' in this case is defined as a given (exception class, location) tuple.
+Unique exceptions are logged to the datastore with an example stacktrace and an
+approximate count of occurrences, grouped by day and application version.
+
+A cron handler, in google.appengine.ext.ereporter.report_generator, constructs
+and emails a report based on the previous day's exceptions.
+
+Example usage:
+
+In your handler script(s), add:
+
+  import logging
+  from google.appengine.ext import ereporter
+
+  ereporter.register_logger()
+
+In your app.yaml, add:
+
+  handlers:
+  - url: /_ereporter/.*
+    script: $PYTHON_LIB/google/appengine/ext/ereporter/report_generator.py
+    login: admin
+
+In your cron.yaml, add:
+
+  cron:
+  - description: Daily exception report
+    url: /_ereporter?sender=you@yourdomain.com
+    schedule: every day 00:00
+
+This will cause a daily exception report to be generated and emailed to all
+admins, with exception traces grouped by minor version. If you only want to
+get exception information for the most recent minor version, add the
+'versions=latest' argument to the query string. For other valid query string
+arguments, see report_generator.py.
+
+If you anticipate a lot of exception traces (for example, if you're deploying
+many minor versions, each of which may have its own set of exceptions), you
+can ensure that the traces from the newest minor versions get included by adding
+this to your index.yaml:
+
+  indexes:
+  - kind: __google_ExceptionRecord
+    properties:
+    - name: date
+    - name: major_version
+    - name: minor_version
+      direction: desc
+"""
+
+
+
+
+
+import datetime
+import logging
+import os
+import sha
+import traceback
+import urllib
+
+from google.appengine.api import memcache
+from google.appengine.ext import db
+from google.appengine.ext import webapp
+
+
+MAX_SIGNATURE_LENGTH = 256
+
+
+class ExceptionRecord(db.Model):
+  """Datastore model for a record of a unique exception."""
+
+  signature = db.StringProperty(required=True)
+  major_version = db.StringProperty(required=True)
+  minor_version = db.IntegerProperty(required=True)
+  date = db.DateProperty(required=True)
+  count = db.IntegerProperty(required=True, default=0)
+
+  stacktrace = db.TextProperty(required=True)
+  http_method = db.TextProperty(required=True)
+  url = db.TextProperty(required=True)
+  handler = db.TextProperty(required=True)
+
+  @classmethod
+  def get_key_name(cls, signature, version, date=None):
+    """Generates a key name for an exception record.
+
+    Args:
+      signature: A signature representing the exception and its site.
+      version: The major/minor version of the app the exception occurred in.
+      date: The date the exception occurred.
+
+    Returns:
+      The unique key name for this exception record.
+    """
+    if not date:
+      date = datetime.date.today()
+    return '%s@%s:%s' % (signature, date, version)
+
+
+class ExceptionRecordingHandler(logging.Handler):
+  """A handler that records exception data to the App Engine datastore."""
+
+  def __init__(self, log_interval=10):
+    """Constructs a new ExceptionRecordingHandler.
+
+    Args:
+      log_interval: The minimum interval at which we will log an individual
+        exception. This is a per-exception timeout, so doesn't affect the
+        aggregate rate of exception logging, only the rate at which we record
+        occurrences of a single exception, to prevent datastore contention.
+    """
+    self.log_interval = log_interval
+    logging.Handler.__init__(self)
+
+  @classmethod
+  def __RelativePath(cls, path):
+    """Rewrites a path to be relative to the app's root directory.
+
+    Args:
+      path: The path to rewrite.
+
+    Returns:
+      The path with the prefix removed, if that prefix matches the app's
+        root directory.
+    """
+    cwd = os.getcwd()
+    if path.startswith(cwd):
+      path = path[len(cwd)+1:]
+    return path
+
+  @classmethod
+  def __GetSignature(cls, exc_info):
+    """Returns a unique signature string for an exception.
+
+    Args:
+      exc_info: The exc_info object for an exception.
+
+    Returns:
+      A unique signature string for the exception, consisting of fully
+      qualified exception name and call site.
+    """
+    ex_type, unused_value, trace = exc_info
+    frames = traceback.extract_tb(trace)
+
+    fulltype = '%s.%s' % (ex_type.__module__, ex_type.__name__)
+    path, line_no = frames[-1][:2]
+    path = cls.__RelativePath(path)
+    site = '%s:%d' % (path, line_no)
+    signature = '%s@%s' % (fulltype, site)
+    if len(signature) > MAX_SIGNATURE_LENGTH:
+      signature = 'hash:%s' % sha.new(signature).hexdigest()
+
+    return signature
+
+  @classmethod
+  def __GetURL(cls):
+    """Returns the URL of the page currently being served.
+
+    Returns:
+      The full URL of the page currently being served.
+    """
+    if os.environ['SERVER_PORT'] == '80':
+      scheme = 'http://'
+    else:
+      scheme = 'https://'
+    host = os.environ['SERVER_NAME']
+    script_name = urllib.quote(os.environ['SCRIPT_NAME'])
+    path_info = urllib.quote(os.environ['PATH_INFO'])
+    qs = os.environ.get('QUERY_STRING', '')
+    if qs:
+      qs = '?' + qs
+    return scheme + host + script_name + path_info + qs
+
+  def __GetFormatter(self):
+    """Returns the log formatter for this handler.
+
+    Returns:
+      The log formatter to use.
+    """
+    if self.formatter:
+      return self.formatter
+    else:
+      return logging._defaultFormatter
+
+  def emit(self, record):
+    """Log an error to the datastore, if applicable.
+
+    Args:
+      record: The logging.LogRecord object.
+        See http://docs.python.org/library/logging.html#logging.LogRecord
+    """
+    try:
+      if not record.exc_info:
+        return
+
+      signature = self.__GetSignature(record.exc_info)
+
+      if not memcache.add(signature, None, self.log_interval):
+        return
+
+      db.run_in_transaction_custom_retries(1, self.__EmitTx, signature,
+                                           record.exc_info)
+    except Exception:
+      self.handleError(record)
+
+  def __EmitTx(self, signature, exc_info):
+    """Run in a transaction to insert or update the record for this transaction.
+
+    Args:
+      signature: The signature for this exception.
+      exc_info: The exception info record.
+    """
+    today = datetime.date.today()
+    version = os.environ['CURRENT_VERSION_ID']
+    major_ver, minor_ver = version.rsplit('.', 1)
+    minor_ver = int(minor_ver)
+    key_name = ExceptionRecord.get_key_name(signature, version)
+
+    exrecord = ExceptionRecord.get_by_key_name(key_name)
+    if not exrecord:
+      exrecord = ExceptionRecord(
+          key_name=key_name,
+          signature=signature,
+          major_version=major_ver,
+          minor_version=minor_ver,
+          date=today,
+          stacktrace=self.__GetFormatter().formatException(exc_info),
+          http_method=os.environ['REQUEST_METHOD'],
+          url=self.__GetURL(),
+          handler=self.__RelativePath(os.environ['PATH_TRANSLATED']))
+
+    exrecord.count += 1
+    exrecord.put()
+
+
+def register_logger(logger=None):
+  if not logger:
+    logger = logging.getLogger()
+  handler = ExceptionRecordingHandler()
+  logger.addHandler(handler)
+  return handler
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/thirdparty/google_appengine/google/appengine/ext/ereporter/report_generator.py	Mon Sep 07 20:27:37 2009 +0200
@@ -0,0 +1,184 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""Generates and emails daily exception reports.
+
+See google/appengine/ext/ereporter/__init__.py for usage details.
+
+Valid query string arguments to the report_generator script include:
+delete:   Set to 'false' to prevent deletion of exception records from the
+          datastore after sending a report. Defaults to 'true'.
+debug:    Set to 'true' to return the report in the response instead of
+          emailing it.
+date:     The date to generate the report for, in yyyy-mm-dd format. Defaults to
+          yesterday's date. Useful for debugging.
+max_results: Maximum number of entries to include in a report.
+sender:   The email address to use as the sender. Must be an administrator.
+to:       If specified, send reports to this address. If not specified, all
+          admins are sent the report.
+versions: 'all' to report on all minor versions, or 'latest' for the latest.
+"""
+
+
+
+
+
+import datetime
+import itertools
+import os
+import re
+from xml.sax import saxutils
+
+from google.appengine.api import mail
+from google.appengine.ext import db
+from google.appengine.ext import ereporter
+from google.appengine.ext import webapp
+from google.appengine.ext.webapp import template
+from google.appengine.ext.webapp.util import run_wsgi_app
+
+
+def isTrue(val):
+  """Determines if a textual value represents 'true'.
+
+  Args:
+    val: A string, which may be 'true', 'yes', 't', '1' to indicate True.
+  Returns:
+    True or False
+  """
+  val = val.lower()
+  return val == 'true' or val == 't' or val == '1' or val == 'yes'
+
+
+class ReportGenerator(webapp.RequestHandler):
+  """Handler class to generate and email an exception report."""
+
+  DEFAULT_MAX_RESULTS = 100
+
+  def __init__(self, send_mail=mail.send_mail,
+               mail_admins=mail.send_mail_to_admins):
+    super(ReportGenerator, self).__init__()
+
+    self.send_mail = send_mail
+    self.send_mail_to_admins = mail_admins
+
+  def GetQuery(self, order=None):
+    """Creates a query object that will retrieve the appropriate exceptions.
+
+    Returns:
+      A query to retrieve the exceptions required.
+    """
+    q = ereporter.ExceptionRecord.all()
+    q.filter('date =', self.yesterday)
+    q.filter('major_version =', self.major_version)
+    if self.version_filter.lower() == 'latest':
+      q.filter('minor_version =', self.minor_version)
+    if order:
+      q.order(order)
+    return q
+
+  def GenerateReport(self, exceptions):
+    """Generates an HTML exception report.
+
+    Args:
+      exceptions: A list of ExceptionRecord objects. This argument will be
+        modified by this function.
+    Returns:
+      An HTML exception report.
+    """
+    exceptions.sort(key=lambda e: (e.minor_version, -e.count))
+    versions = [(minor, list(excs)) for minor, excs
+                in itertools.groupby(exceptions, lambda e: e.minor_version)]
+
+    template_values = {
+        'version_filter': self.version_filter,
+        'version_count': len(versions),
+
+        'exception_count': sum(len(excs) for _, excs in versions),
+
+        'occurrence_count': sum(y.count for x in versions for y in x[1]),
+        'app_id': self.app_id,
+        'major_version': self.major_version,
+        'date': self.yesterday,
+        'versions': versions,
+    }
+    path = os.path.join(os.path.dirname(__file__), 'templates', 'report.html')
+    return template.render(path, template_values)
+
+  def SendReport(self, report):
+    """Emails an exception report.
+
+    Args:
+      report: A string containing the report to send.
+    """
+    subject = ('Daily exception report for app "%s", major version "%s"'
+               % (self.app_id, self.major_version))
+    report_text = saxutils.unescape(re.sub('<[^>]+>', '', report))
+    mail_args = {
+        'sender': self.sender,
+        'subject': subject,
+        'body': report_text,
+        'html': report,
+    }
+    if self.to:
+      mail_args['to'] = self.to
+      self.send_mail(**mail_args)
+    else:
+      self.send_mail_to_admins(**mail_args)
+
+  def get(self):
+    self.version_filter = self.request.GET.get('versions', 'all')
+    self.sender = self.request.GET['sender']
+    self.to = self.request.GET.get('to', None)
+    report_date = self.request.GET.get('date', None)
+    if report_date:
+      self.yesterday = datetime.date(*[int(x) for x in report_date.split('-')])
+    else:
+      self.yesterday = datetime.date.today() - datetime.timedelta(days=1)
+    self.app_id = os.environ['APPLICATION_ID']
+    version = os.environ['CURRENT_VERSION_ID']
+    self.major_version, self.minor_version = version.rsplit('.', 1)
+    self.minor_version = int(self.minor_version)
+    self.max_results = int(self.request.GET.get('max_results',
+                                                self.DEFAULT_MAX_RESULTS))
+    self.debug = isTrue(self.request.GET.get('debug', 'false'))
+    self.delete = isTrue(self.request.GET.get('delete', 'true'))
+
+    try:
+      exceptions = self.GetQuery(order='-minor_version').fetch(self.max_results)
+    except db.NeedIndexError:
+      exceptions = self.GetQuery().fetch(self.max_results)
+
+    if exceptions:
+      report = self.GenerateReport(exceptions)
+      if self.debug:
+        self.response.out.write(report)
+      else:
+        self.SendReport(report)
+
+      if self.delete:
+        db.delete(exceptions)
+
+
+application = webapp.WSGIApplication([('.*', ReportGenerator)])
+
+
+def main():
+  run_wsgi_app(application)
+
+
+if __name__ == '__main__':
+  main()
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/thirdparty/google_appengine/google/appengine/ext/ereporter/templates/report.html	Mon Sep 07 20:27:37 2009 +0200
@@ -0,0 +1,15 @@
+<!-- Unusual layout is to ensure template is useful with tags stripped, too -->
+<html><head><title>Daily exception report for app "{{app_id}}", major version "{{major_version}}".</title></head>
+<body><p>At least {{occurrence_count}} occurrences of {{exception_count}} exceptions across {{version_count}} versions.</p>
+{% for version in versions %}
+<h1>Minor version {{version.0}}</h1>
+{% for exception in version.1 %}
+<h2>{{exception.signature}} (at least {{exception.count}} occurrences)</h2>
+  <table><tr><th>Handler:</th> <td>{{exception.handler}}</td></tr>
+  <tr><th>URL:</th> <td>{{exception.method|escape}} {{exception.url|escape}}</td></tr>
+  <tr><th>Stacktrace:</th>
+
+<td><pre>{{exception.stacktrace|escape}}</pre></td></tr></table>
+
+
+{% endfor %}{% endfor %}</body>
\ No newline at end of file
--- a/thirdparty/google_appengine/google/appengine/ext/gql/__init__.py	Mon Sep 07 20:26:39 2009 +0200
+++ b/thirdparty/google_appengine/google/appengine/ext/gql/__init__.py	Mon Sep 07 20:27:37 2009 +0200
@@ -77,7 +77,7 @@
 
   The syntax for SELECT is fairly straightforward:
 
-  SELECT [* | __key__ ] FROM <entity>
+  SELECT [* | __key__ ] [FROM <entity>]
     [WHERE <condition> [AND <condition> ...]]
     [ORDER BY <property> [ASC | DESC] [, <property> [ASC | DESC] ...]]
     [LIMIT [<offset>,]<count>]
@@ -805,14 +805,16 @@
     Returns:
       True if parsing completed okay.
     """
-    self.__Expect('FROM')
-    entity = self.__AcceptRegex(self.__identifier_regex)
-    if entity:
-      self._entity = entity
-      return self.__Where()
+    if self.__Accept('FROM'):
+      kind = self.__AcceptRegex(self.__identifier_regex)
+      if kind:
+        self._entity = kind
+      else:
+        self.__Error('Identifier Expected')
+        return False
     else:
-      self.__Error('Identifier Expected')
-      return False
+      self._entity = None
+    return self.__Where()
 
   def __Where(self):
     """Consume the WHERE cluase.
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/thirdparty/google_appengine/google/appengine/ext/key_range/__init__.py	Mon Sep 07 20:27:37 2009 +0200
@@ -0,0 +1,570 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""Key range representation and splitting."""
+
+
+import os
+
+try:
+  import simplejson
+except ImportError:
+  simplejson = None
+
+from google.appengine.api import datastore
+from google.appengine.datastore import datastore_pb
+from google.appengine.ext import db
+
+
+class Error(Exception):
+  """Base class for exceptions in this module."""
+
+
+class KeyRangeError(Error):
+  """Error while trying to generate a KeyRange."""
+
+
+class SimplejsonUnavailableError(Error):
+  """Error while using json functionality whith unavailable simplejson."""
+
+class EmptyDbQuery(db.Query):
+  """A query that returns no results."""
+
+  def get(self):
+    return None
+
+  def fetch(self, limit=1000, offset=0):
+    return []
+
+  def count(self, limit=1000):
+    return 0
+
+
+class EmptyDatastoreQuery(datastore.Query):
+  """A query that returns no results."""
+
+  def __init__(self, kind):
+    datastore.Query.__init__(self, kind)
+
+  def _Run(self, *unused_args, **unused_kwargs):
+    empty_result_pb = datastore_pb.QueryResult()
+    empty_result_pb.set_cursor(0)
+    empty_result_pb.set_more_results(False)
+    return datastore.Iterator(empty_result_pb)
+
+  def Count(self, *unused_args, **unused_kwargs):
+    return 0
+
+  def Get(self, *unused_args, **unused_kwargs):
+    return []
+
+  def Next(self, *unused_args, **unused_kwargs):
+    return []
+
+
+class KeyRange(object):
+  """Represents a range of keys in the datastore.
+
+  A KeyRange object represents a key range
+    (key_start, include_start, key_end, include_end)
+  and a scan direction (KeyRange.DESC or KeyRange.ASC).
+  """
+
+  DESC = 'DESC'
+  ASC = 'ASC'
+
+  def __init__(self,
+               key_start=None,
+               key_end=None,
+               direction=None,
+               include_start=True,
+               include_end=True):
+    """Initialize a KeyRange object.
+
+    Args:
+      key_start: The starting key for this range.
+      key_end: The ending key for this range.
+      direction: The direction of the query for this range.
+      include_start: Whether the start key should be included in the range.
+      include_end: Whether the end key should be included in the range.
+    """
+    if direction is None:
+      direction = KeyRange.ASC
+    assert direction in (KeyRange.ASC, KeyRange.DESC)
+    self.direction = direction
+    self.key_start = key_start
+    self.key_end = key_end
+    self.include_start = include_start
+    self.include_end = include_end
+
+  def __str__(self):
+    if self.include_start:
+      left_side = '['
+    else:
+      left_side = '('
+    if self.include_end:
+      right_side = ']'
+    else:
+      right_side = '('
+    return '%s%s%s-%s%s' % (self.direction, left_side, repr(self.key_start),
+                            repr(self.key_end), right_side)
+
+  def __repr__(self):
+    return ('key_range.KeyRange(key_start=%s,key_end=%s,direction=%s,'
+            'include_start=%s,include_end=%s)') % (repr(self.key_start),
+                                                   repr(self.key_end),
+                                                   repr(self.direction),
+                                                   repr(self.include_start),
+                                                   repr(self.include_end))
+
+  def filter_query(self, query):
+    """Add query filter to restrict to this key range.
+
+    Args:
+      query: A db.Query instance.
+
+    Returns:
+      The input query restricted to this key range or an empty query if
+      this key range is empty.
+    """
+    assert isinstance(query, db.Query)
+    if self.key_start == self.key_end and not (
+        self.include_start or self.include_end):
+      return EmptyDbQuery()
+    if self.include_start:
+      start_comparator = '>='
+    else:
+      start_comparator = '>'
+    if self.include_end:
+      end_comparator = '<='
+    else:
+      end_comparator = '<'
+    if self.key_start:
+      query.filter('__key__ %s' % start_comparator, self.key_start)
+    if self.key_end:
+      query.filter('__key__ %s' % end_comparator, self.key_end)
+    return query
+
+  def filter_datastore_query(self, query):
+    """Add query filter to restrict to this key range.
+
+    Args:
+      query: A datastore.Query instance.
+
+    Returns:
+      The input query restricted to this key range or an empty query if
+      this key range is empty.
+    """
+    assert isinstance(query, datastore.Query)
+    if self.key_start == self.key_end and not (
+        self.include_start or self.include_end):
+      return EmptyDatastoreQuery(query.kind)
+    if self.include_start:
+      start_comparator = '>='
+    else:
+      start_comparator = '>'
+    if self.include_end:
+      end_comparator = '<='
+    else:
+      end_comparator = '<'
+    if self.key_start:
+      query.update({'__key__ %s' % start_comparator: self.key_start})
+    if self.key_end:
+      query.update({'__key__ %s' % end_comparator: self.key_end})
+    return query
+
+  def __get_direction(self, asc, desc):
+    """Check that self.direction is in (KeyRange.ASC, KeyRange.DESC).
+
+    Args:
+      asc: Argument to return if self.direction is KeyRange.ASC
+      desc: Argument to return if self.direction is KeyRange.DESC
+
+    Returns:
+      asc or desc appropriately
+
+    Raises:
+      KeyRangeError: if self.direction is not in (KeyRange.ASC, KeyRange.DESC).
+    """
+    if self.direction == KeyRange.ASC:
+      return asc
+    elif self.direction == KeyRange.DESC:
+      return desc
+    else:
+      raise KeyRangeError('KeyRange direction unexpected: %s', self.direction)
+
+  def make_directed_query(self, kind_class):
+    """Construct a query for this key range, including the scan direction.
+
+    Args:
+      kind_class: A kind implementation class.
+
+    Returns:
+      A db.Query instance.
+
+    Raises:
+      KeyRangeError: if self.direction is not in (KeyRange.ASC, KeyRange.DESC).
+    """
+    direction = self.__get_direction('', '-')
+    query = db.Query(kind_class)
+    query.order('%s__key__' % direction)
+
+    query = self.filter_query(query)
+    return query
+
+  def make_directed_datastore_query(self, kind):
+    """Construct a query for this key range, including the scan direction.
+
+    Args:
+      kind: A string.
+
+    Returns:
+      A datastore.Query instance.
+
+    Raises:
+      KeyRangeError: if self.direction is not in (KeyRange.ASC, KeyRange.DESC).
+    """
+    direction = self.__get_direction(datastore.Query.ASCENDING,
+                                     datastore.Query.DESCENDING)
+    query = datastore.Query(kind)
+    query.Order(('__key__', direction))
+
+    query = self.filter_datastore_query(query)
+    return query
+
+  def make_ascending_query(self, kind_class):
+    """Construct a query for this key range without setting the scan direction.
+
+    Args:
+      kind_class: A kind implementation class.
+
+    Returns:
+      A db.Query instance.
+    """
+    query = db.Query(kind_class)
+    query.order('__key__')
+
+    query = self.filter_query(query)
+    return query
+
+  def make_ascending_datastore_query(self, kind):
+    """Construct a query for this key range without setting the scan direction.
+
+    Args:
+      kind: A string.
+
+    Returns:
+      A datastore.Query instance.
+    """
+    query = datastore.Query(kind)
+    query.Order(('__key__', datastore.Query.ASCENDING))
+
+    query = self.filter_datastore_query(query)
+    return query
+
+  def split_range(self, batch_size=0):
+    """Split this key range into a list of at most two ranges.
+
+    This method attempts to split the key range approximately in half.
+    Numeric ranges are split in the middle into two equal ranges and
+    string ranges are split lexicographically in the middle.  If the
+    key range is smaller than batch_size it is left unsplit.
+
+    Note that splitting is done without knowledge of the distribution
+    of actual entities in the key range, so there is no guarantee (nor
+    any particular reason to believe) that the entities of the range
+    are evenly split.
+
+    Args:
+      batch_size: The maximum size of a key range that should not be split.
+
+    Returns:
+      A list of one or two key ranges covering the same space as this range.
+    """
+    key_start = self.key_start
+    key_end = self.key_end
+    include_start = self.include_start
+    include_end = self.include_end
+
+    key_pairs = []
+    if not key_start:
+      key_pairs.append((key_start, include_start, key_end, include_end,
+                        KeyRange.ASC))
+    elif not key_end:
+      key_pairs.append((key_start, include_start, key_end, include_end,
+                        KeyRange.DESC))
+    else:
+      key_split = KeyRange.split_keys(key_start, key_end, batch_size)
+      first_include_end = True
+      if key_split == key_start:
+        first_include_end = first_include_end and include_start
+
+      key_pairs.append((key_start, include_start,
+                        key_split, first_include_end,
+                        KeyRange.DESC))
+
+      second_include_end = include_end
+      if key_split == key_end:
+        second_include_end = False
+      key_pairs.append((key_split, False,
+                        key_end, second_include_end,
+                        KeyRange.ASC))
+
+    ranges = [KeyRange(key_start=start,
+                       include_start=include_start,
+                       key_end=end,
+                       include_end=include_end,
+                       direction=direction)
+              for (start, include_start, end, include_end, direction)
+              in key_pairs]
+
+    return ranges
+
+  def __cmp__(self, other):
+    """Compare two key ranges.
+
+    Key ranges with a value of None for key_start or key_end, are always
+    considered to have include_start=False or include_end=False, respectively,
+    when comparing.  Since None indicates an unbounded side of the range,
+    the include specifier is meaningless.  The ordering generated is total
+    but somewhat arbitrary.
+
+    Args:
+      other: An object to compare to this one.
+
+    Returns:
+      -1: if this key range is less than other.
+      0:  if this key range is equal to other.
+      1: if this key range is greater than other.
+    """
+    if not isinstance(other, KeyRange):
+      return 1
+
+    self_list = [self.key_start, self.key_end, self.direction,
+                 self.include_start, self.include_end]
+    if not self.key_start:
+      self_list[3] = False
+    if not self.key_end:
+      self_list[4] = False
+
+    other_list = [other.key_start,
+                  other.key_end,
+                  other.direction,
+                  other.include_start,
+                  other.include_end]
+    if not other.key_start:
+      other_list[3] = False
+    if not other.key_end:
+      other_list[4] = False
+
+    return cmp(self_list, other_list)
+
+  @staticmethod
+  def bisect_string_range(start, end):
+    """Returns a string that is approximately in the middle of the range.
+
+    (start, end) is treated as a string range, and it is assumed
+    start <= end in the usual lexicographic string ordering. The output key
+    mid is guaranteed to satisfy start <= mid <= end.
+
+    The method proceeds by comparing initial characters of start and
+    end.  When the characters are equal, they are appended to the mid
+    string.  In the first place that the characters differ, the
+    difference characters are averaged and this average is appended to
+    the mid string.  If averaging resulted in rounding down, an
+    additional character is added to the mid string to make up for the
+    rounding down.  This extra step is necessary for correctness in
+    the case that the average of the two characters is equal to the
+    character in the start string.
+
+    This method makes the assumption that most keys are ascii and it
+    attempts to perform splitting within the ascii range when that
+    results in a valid split.
+
+    Args:
+      start: A string.
+      end: A string such that start <= end.
+
+    Returns:
+      A string mid such that start <= mid <= end.
+    """
+    if start == end:
+      return start
+    start += '\0'
+    end += '\0'
+    midpoint = []
+    expected_max = 127
+    for i in xrange(min(len(start), len(end))):
+      if start[i] == end[i]:
+        midpoint.append(start[i])
+      else:
+        ord_sum = ord(start[i]) + ord(end[i])
+        midpoint.append(unichr(ord_sum / 2))
+        if ord_sum % 2:
+          if len(start) > i + 1:
+            ord_start = ord(start[i+1])
+          else:
+            ord_start = 0
+          if ord_start < expected_max:
+            ord_split = (expected_max + ord_start) / 2
+          else:
+            ord_split = (0xFFFF + ord_start) / 2
+          midpoint.append(unichr(ord_split))
+        break
+    return ''.join(midpoint)
+
+  @staticmethod
+  def split_keys(key_start, key_end, batch_size):
+    """Return a key that is between key_start and key_end inclusive.
+
+    This method compares components of the ancestor paths of key_start
+    and key_end.  The first place in the path that differs is
+    approximately split in half.  If the kind components differ, a new
+    non-existent kind halfway between the two is used to split the
+    space. If the id_or_name components differ, then a new id_or_name
+    that is halfway between the two is selected.  If the lower
+    id_or_name is numeric and the upper id_or_name is a string, then
+    the minimum string key u'\0' is used as the split id_or_name.  The
+    key that is returned is the shared portion of the ancestor path
+    followed by the generated split component.
+
+    Args:
+      key_start: A db.Key instance for the lower end of a range.
+      key_end: A db.Key instance for the upper end of a range.
+      batch_size: The maximum size of a range that should not be split.
+
+    Returns:
+      A db.Key instance, k, such that key_start <= k <= key_end.
+    """
+    assert key_start.app() == key_end.app()
+    path1 = key_start.to_path()
+    path2 = key_end.to_path()
+    len1 = len(path1)
+    len2 = len(path2)
+    assert len1 % 2 == 0
+    assert len2 % 2 == 0
+    out_path = []
+    min_path_len = min(len1, len2) / 2
+    for i in xrange(min_path_len):
+      kind1 = path1[2*i]
+      kind2 = path2[2*i]
+
+      if kind1 != kind2:
+        split_kind = KeyRange.bisect_string_range(kind1, kind2)
+        out_path.append(split_kind)
+        out_path.append(unichr(0))
+        break
+
+      last = (len1 == len2 == 2*(i + 1))
+
+      id_or_name1 = path1[2*i + 1]
+      id_or_name2 = path2[2*i + 1]
+      id_or_name_split = KeyRange._split_id_or_name(
+          id_or_name1, id_or_name2, batch_size, last)
+      if id_or_name1 == id_or_name_split:
+        out_path.append(kind1)
+        out_path.append(id_or_name1)
+      else:
+        out_path.append(kind1)
+        out_path.append(id_or_name_split)
+        break
+
+    return db.Key.from_path(*out_path)
+
+  @staticmethod
+  def _split_id_or_name(id_or_name1, id_or_name2, batch_size, maintain_batches):
+    """Return an id_or_name that is between id_or_name1 an id_or_name2.
+
+    Attempts to split the range [id_or_name1, id_or_name2] in half,
+    unless maintain_batches is true and the size of the range
+    [id_or_name1, id_or_name2] is less than or equal to batch_size.
+
+    Args:
+      id_or_name1: A number or string or the id_or_name component of a key
+      id_or_name2: A number or string or the id_or_name component of a key
+      batch_size: The range size that will not be split if maintain_batches
+        is true.
+      maintain_batches: A boolean for whether to keep small ranges intact.
+
+    Returns:
+      An id_or_name such that id_or_name1 <= id_or_name <= id_or_name2.
+    """
+    if (isinstance(id_or_name1, (int, long)) and
+        isinstance(id_or_name2, (int, long))):
+      if not maintain_batches or id_or_name2 - id_or_name1 > batch_size:
+        return (id_or_name1 + id_or_name2) / 2
+      else:
+        return id_or_name1
+    elif (isinstance(id_or_name1, basestring) and
+          isinstance(id_or_name2, basestring)):
+      return KeyRange.bisect_string_range(id_or_name1, id_or_name2)
+    else:
+      assert (isinstance(id_or_name1, (int, long)) and
+              isinstance(id_or_name2, basestring))
+      return unichr(0)
+
+  def to_json(self):
+    """Serialize KeyRange to json.
+
+    Returns:
+      string with KeyRange json representation.
+    """
+    if simplejson is None:
+      raise SimplejsonUnavailableError(
+          "JSON functionality requires simplejson to be available")
+
+    def key_to_str(key):
+      if key:
+        return str(key)
+      else:
+        return None
+
+    return simplejson.dumps({
+        "direction": self.direction,
+        "key_start": key_to_str(self.key_start),
+        "key_end": key_to_str(self.key_end),
+        "include_start": self.include_start,
+        "include_end": self.include_end,
+        }, sort_keys=True)
+
+
+  @staticmethod
+  def from_json(json_str):
+    """Deserialize KeyRange from its json representation.
+
+    Args:
+      json_str: string with json representation created by key_range_to_json.
+
+    Returns:
+      deserialized KeyRange instance.
+    """
+    if simplejson is None:
+      raise SimplejsonUnavailableError(
+          "JSON functionality requires simplejson to be available")
+
+    def key_from_str(key_str):
+      if key_str:
+        return db.Key(key_str)
+      else:
+        return None
+
+    json = simplejson.loads(json_str)
+    return KeyRange(key_from_str(json["key_start"]),
+                    key_from_str(json["key_end"]),
+                    json["direction"],
+                    json["include_start"],
+                    json["include_end"])
--- a/thirdparty/google_appengine/google/appengine/ext/remote_api/handler.py	Mon Sep 07 20:26:39 2009 +0200
+++ b/thirdparty/google_appengine/google/appengine/ext/remote_api/handler.py	Mon Sep 07 20:27:37 2009 +0200
@@ -48,7 +48,13 @@
 from google.appengine.api import api_base_pb
 from google.appengine.api import apiproxy_stub
 from google.appengine.api import apiproxy_stub_map
+from google.appengine.api import datastore_errors
+from google.appengine.api import mail_service_pb
+from google.appengine.api import urlfetch_service_pb
 from google.appengine.api import users
+from google.appengine.api.capabilities import capability_service_pb
+from google.appengine.api.images import images_service_pb
+from google.appengine.api.memcache import memcache_service_pb
 from google.appengine.datastore import datastore_pb
 from google.appengine.ext import webapp
 from google.appengine.ext.remote_api import remote_api_pb
@@ -76,6 +82,10 @@
     runquery_response = datastore_pb.QueryResult()
     apiproxy_stub_map.MakeSyncCall('datastore_v3', 'RunQuery',
                                    request, runquery_response)
+    if runquery_response.result_size() > 0:
+      response.CopyFrom(runquery_response)
+      return
+
     next_request = datastore_pb.NextRequest()
     next_request.mutable_cursor().CopyFrom(runquery_response.cursor())
     next_request.set_count(request.limit())
@@ -154,18 +164,52 @@
 
 
 SERVICE_PB_MAP = {
+    'capability_service': {
+        'IsEnabled': (capability_service_pb.IsEnabledRequest,
+                      capability_service_pb.IsEnabledResponse),
+    },
     'datastore_v3': {
-        'Get': (datastore_pb.GetRequest, datastore_pb.GetResponse),
-        'Put': (datastore_pb.PutRequest, datastore_pb.PutResponse),
-        'Delete': (datastore_pb.DeleteRequest, datastore_pb.DeleteResponse),
-        'Count': (datastore_pb.Query, api_base_pb.Integer64Proto),
+        'Get':        (datastore_pb.GetRequest, datastore_pb.GetResponse),
+        'Put':        (datastore_pb.PutRequest, datastore_pb.PutResponse),
+        'Delete':     (datastore_pb.DeleteRequest, datastore_pb.DeleteResponse),
+        'Count':      (datastore_pb.Query, api_base_pb.Integer64Proto),
         'GetIndices': (api_base_pb.StringProto, datastore_pb.CompositeIndices),
     },
+    'images': {
+        'Transform': (images_service_pb.ImagesTransformRequest,
+                      images_service_pb.ImagesTransformResponse),
+        'Composite': (images_service_pb.ImagesCompositeRequest,
+                      images_service_pb.ImagesCompositeResponse),
+        'Histogram': (images_service_pb.ImagesHistogramRequest,
+                      images_service_pb.ImagesHistogramResponse),
+    },
+    'mail': {
+        'Send':         (mail_service_pb.MailMessage, api_base_pb.VoidProto),
+        'SendToAdmins': (mail_service_pb.MailMessage, api_base_pb.VoidProto),
+    },
+    'memcache': {
+        'Get':       (memcache_service_pb.MemcacheGetRequest,
+                      memcache_service_pb.MemcacheGetResponse),
+        'Set':       (memcache_service_pb.MemcacheSetRequest,
+                      memcache_service_pb.MemcacheSetResponse),
+        'Delete':    (memcache_service_pb.MemcacheDeleteRequest,
+                      memcache_service_pb.MemcacheDeleteResponse),
+        'Increment': (memcache_service_pb.MemcacheIncrementRequest,
+                      memcache_service_pb.MemcacheIncrementResponse),
+        'FlushAll':  (memcache_service_pb.MemcacheFlushRequest,
+                      memcache_service_pb.MemcacheFlushResponse),
+        'Stats':     (memcache_service_pb.MemcacheStatsRequest,
+                      memcache_service_pb.MemcacheStatsResponse),
+    },
     'remote_datastore': {
-        'RunQuery': (datastore_pb.Query, datastore_pb.QueryResult),
+        'RunQuery':    (datastore_pb.Query, datastore_pb.QueryResult),
         'Transaction': (remote_api_pb.TransactionRequest,
-                             datastore_pb.PutResponse),
-        'GetIDs': (remote_api_pb.PutRequest, datastore_pb.PutResponse),
+                        datastore_pb.PutResponse),
+        'GetIDs':      (remote_api_pb.PutRequest, datastore_pb.PutResponse),
+    },
+    'urlfetch': {
+        'Fetch': (urlfetch_service_pb.URLFetchRequest,
+                  urlfetch_service_pb.URLFetchResponse),
     },
 }
 
@@ -187,6 +231,7 @@
     elif 'X-appcfg-api-version' not in self.request.headers:
       self.response.set_status(403)
       self.response.out.write("This request did not contain a necessary header")
+      self.response.headers['Content-Type'] = 'text/plain'
       return False
     return True
 
@@ -202,6 +247,7 @@
         'rtok': rtok
         }
 
+    self.response.headers['Content-Type'] = 'text/plain'
     self.response.out.write(yaml.dump(app_info))
 
   def post(self):
@@ -221,6 +267,10 @@
       logging.exception('Exception while handling %s', request)
       self.response.set_status(200)
       response.mutable_exception().set_contents(pickle.dumps(e))
+      if isinstance(e, datastore_errors.Error):
+        application_error = response.mutable_application_error()
+        application_error.set_code(e.application_error)
+        application_error.set_detail(e.error_detail)
     self.response.out.write(response.Encode())
 
   def ExecuteRequest(self, request):
--- a/thirdparty/google_appengine/google/appengine/ext/remote_api/remote_api_pb.py	Mon Sep 07 20:26:39 2009 +0200
+++ b/thirdparty/google_appengine/google/appengine/ext/remote_api/remote_api_pb.py	Mon Sep 07 20:27:37 2009 +0200
@@ -155,29 +155,144 @@
       res+=prefix+">\n"
     return res
 
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
   kservice_name = 2
   kmethod = 3
   krequest = 4
 
-  _TEXT = (
-   "ErrorCode",
-   None,
-   "service_name",
-   "method",
-   "request",
-  )
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    2: "service_name",
+    3: "method",
+    4: "request",
+  }, 4)
+
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    2: ProtocolBuffer.Encoder.STRING,
+    3: ProtocolBuffer.Encoder.STRING,
+    4: ProtocolBuffer.Encoder.STRING,
+  }, 4, ProtocolBuffer.Encoder.MAX_TYPE)
+
+  _STYLE = """"""
+  _STYLE_CONTENT_TYPE = """"""
+class ApplicationError(ProtocolBuffer.ProtocolMessage):
+  has_code_ = 0
+  code_ = 0
+  has_detail_ = 0
+  detail_ = ""
+
+  def __init__(self, contents=None):
+    if contents is not None: self.MergeFromString(contents)
+
+  def code(self): return self.code_
+
+  def set_code(self, x):
+    self.has_code_ = 1
+    self.code_ = x
+
+  def clear_code(self):
+    if self.has_code_:
+      self.has_code_ = 0
+      self.code_ = 0
+
+  def has_code(self): return self.has_code_
+
+  def detail(self): return self.detail_
+
+  def set_detail(self, x):
+    self.has_detail_ = 1
+    self.detail_ = x
+
+  def clear_detail(self):
+    if self.has_detail_:
+      self.has_detail_ = 0
+      self.detail_ = ""
+
+  def has_detail(self): return self.has_detail_
+
+
+  def MergeFrom(self, x):
+    assert x is not self
+    if (x.has_code()): self.set_code(x.code())
+    if (x.has_detail()): self.set_detail(x.detail())
+
+  def Equals(self, x):
+    if x is self: return 1
+    if self.has_code_ != x.has_code_: return 0
+    if self.has_code_ and self.code_ != x.code_: return 0
+    if self.has_detail_ != x.has_detail_: return 0
+    if self.has_detail_ and self.detail_ != x.detail_: return 0
+    return 1
 
-  _TYPES = (
-   ProtocolBuffer.Encoder.NUMERIC,
-   ProtocolBuffer.Encoder.MAX_TYPE,
+  def IsInitialized(self, debug_strs=None):
+    initialized = 1
+    if (not self.has_code_):
+      initialized = 0
+      if debug_strs is not None:
+        debug_strs.append('Required field: code not set.')
+    if (not self.has_detail_):
+      initialized = 0
+      if debug_strs is not None:
+        debug_strs.append('Required field: detail not set.')
+    return initialized
 
-   ProtocolBuffer.Encoder.STRING,
+  def ByteSize(self):
+    n = 0
+    n += self.lengthVarInt64(self.code_)
+    n += self.lengthString(len(self.detail_))
+    return n + 2
+
+  def Clear(self):
+    self.clear_code()
+    self.clear_detail()
+
+  def OutputUnchecked(self, out):
+    out.putVarInt32(8)
+    out.putVarInt32(self.code_)
+    out.putVarInt32(18)
+    out.putPrefixedString(self.detail_)
 
-   ProtocolBuffer.Encoder.STRING,
+  def TryMerge(self, d):
+    while d.avail() > 0:
+      tt = d.getVarInt32()
+      if tt == 8:
+        self.set_code(d.getVarInt32())
+        continue
+      if tt == 18:
+        self.set_detail(d.getPrefixedString())
+        continue
+      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+      d.skipData(tt)
+
+
+  def __str__(self, prefix="", printElemNumber=0):
+    res=""
+    if self.has_code_: res+=prefix+("code: %s\n" % self.DebugFormatInt32(self.code_))
+    if self.has_detail_: res+=prefix+("detail: %s\n" % self.DebugFormatString(self.detail_))
+    return res
 
-   ProtocolBuffer.Encoder.STRING,
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+  kcode = 1
+  kdetail = 2
 
-  )
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    1: "code",
+    2: "detail",
+  }, 2)
+
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    1: ProtocolBuffer.Encoder.NUMERIC,
+    2: ProtocolBuffer.Encoder.STRING,
+  }, 2, ProtocolBuffer.Encoder.MAX_TYPE)
 
   _STYLE = """"""
   _STYLE_CONTENT_TYPE = """"""
@@ -186,6 +301,10 @@
   response_ = None
   has_exception_ = 0
   exception_ = None
+  has_application_error_ = 0
+  application_error_ = None
+  has_java_exception_ = 0
+  java_exception_ = None
 
   def __init__(self, contents=None):
     self.lazy_init_lock_ = thread.allocate_lock()
@@ -227,11 +346,49 @@
 
   def has_exception(self): return self.has_exception_
 
+  def application_error(self):
+    if self.application_error_ is None:
+      self.lazy_init_lock_.acquire()
+      try:
+        if self.application_error_ is None: self.application_error_ = ApplicationError()
+      finally:
+        self.lazy_init_lock_.release()
+    return self.application_error_
+
+  def mutable_application_error(self): self.has_application_error_ = 1; return self.application_error()
+
+  def clear_application_error(self):
+    if self.has_application_error_:
+      self.has_application_error_ = 0;
+      if self.application_error_ is not None: self.application_error_.Clear()
+
+  def has_application_error(self): return self.has_application_error_
+
+  def java_exception(self):
+    if self.java_exception_ is None:
+      self.lazy_init_lock_.acquire()
+      try:
+        if self.java_exception_ is None: self.java_exception_ = RawMessage()
+      finally:
+        self.lazy_init_lock_.release()
+    return self.java_exception_
+
+  def mutable_java_exception(self): self.has_java_exception_ = 1; return self.java_exception()
+
+  def clear_java_exception(self):
+    if self.has_java_exception_:
+      self.has_java_exception_ = 0;
+      if self.java_exception_ is not None: self.java_exception_.Clear()
+
+  def has_java_exception(self): return self.has_java_exception_
+
 
   def MergeFrom(self, x):
     assert x is not self
     if (x.has_response()): self.mutable_response().MergeFrom(x.response())
     if (x.has_exception()): self.mutable_exception().MergeFrom(x.exception())
+    if (x.has_application_error()): self.mutable_application_error().MergeFrom(x.application_error())
+    if (x.has_java_exception()): self.mutable_java_exception().MergeFrom(x.java_exception())
 
   def Equals(self, x):
     if x is self: return 1
@@ -239,23 +396,33 @@
     if self.has_response_ and self.response_ != x.response_: return 0
     if self.has_exception_ != x.has_exception_: return 0
     if self.has_exception_ and self.exception_ != x.exception_: return 0
+    if self.has_application_error_ != x.has_application_error_: return 0
+    if self.has_application_error_ and self.application_error_ != x.application_error_: return 0
+    if self.has_java_exception_ != x.has_java_exception_: return 0
+    if self.has_java_exception_ and self.java_exception_ != x.java_exception_: return 0
     return 1
 
   def IsInitialized(self, debug_strs=None):
     initialized = 1
     if (self.has_response_ and not self.response_.IsInitialized(debug_strs)): initialized = 0
     if (self.has_exception_ and not self.exception_.IsInitialized(debug_strs)): initialized = 0
+    if (self.has_application_error_ and not self.application_error_.IsInitialized(debug_strs)): initialized = 0
+    if (self.has_java_exception_ and not self.java_exception_.IsInitialized(debug_strs)): initialized = 0
     return initialized
 
   def ByteSize(self):
     n = 0
     if (self.has_response_): n += 1 + self.lengthString(self.response_.ByteSize())
     if (self.has_exception_): n += 1 + self.lengthString(self.exception_.ByteSize())
+    if (self.has_application_error_): n += 1 + self.lengthString(self.application_error_.ByteSize())
+    if (self.has_java_exception_): n += 1 + self.lengthString(self.java_exception_.ByteSize())
     return n + 0
 
   def Clear(self):
     self.clear_response()
     self.clear_exception()
+    self.clear_application_error()
+    self.clear_java_exception()
 
   def OutputUnchecked(self, out):
     if (self.has_response_):
@@ -266,6 +433,14 @@
       out.putVarInt32(18)
       out.putVarInt32(self.exception_.ByteSize())
       self.exception_.OutputUnchecked(out)
+    if (self.has_application_error_):
+      out.putVarInt32(26)
+      out.putVarInt32(self.application_error_.ByteSize())
+      self.application_error_.OutputUnchecked(out)
+    if (self.has_java_exception_):
+      out.putVarInt32(34)
+      out.putVarInt32(self.java_exception_.ByteSize())
+      self.java_exception_.OutputUnchecked(out)
 
   def TryMerge(self, d):
     while d.avail() > 0:
@@ -282,6 +457,18 @@
         d.skip(length)
         self.mutable_exception().TryMerge(tmp)
         continue
+      if tt == 26:
+        length = d.getVarInt32()
+        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
+        d.skip(length)
+        self.mutable_application_error().TryMerge(tmp)
+        continue
+      if tt == 34:
+        length = d.getVarInt32()
+        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
+        d.skip(length)
+        self.mutable_java_exception().TryMerge(tmp)
+        continue
       if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
       d.skipData(tt)
 
@@ -296,24 +483,40 @@
       res+=prefix+"exception <\n"
       res+=self.exception_.__str__(prefix + "  ", printElemNumber)
       res+=prefix+">\n"
+    if self.has_application_error_:
+      res+=prefix+"application_error <\n"
+      res+=self.application_error_.__str__(prefix + "  ", printElemNumber)
+      res+=prefix+">\n"
+    if self.has_java_exception_:
+      res+=prefix+"java_exception <\n"
+      res+=self.java_exception_.__str__(prefix + "  ", printElemNumber)
+      res+=prefix+">\n"
     return res
 
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
   kresponse = 1
   kexception = 2
-
-  _TEXT = (
-   "ErrorCode",
-   "response",
-   "exception",
-  )
+  kapplication_error = 3
+  kjava_exception = 4
 
-  _TYPES = (
-   ProtocolBuffer.Encoder.NUMERIC,
-   ProtocolBuffer.Encoder.STRING,
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    1: "response",
+    2: "exception",
+    3: "application_error",
+    4: "java_exception",
+  }, 4)
 
-   ProtocolBuffer.Encoder.STRING,
-
-  )
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    1: ProtocolBuffer.Encoder.STRING,
+    2: ProtocolBuffer.Encoder.STRING,
+    3: ProtocolBuffer.Encoder.STRING,
+    4: ProtocolBuffer.Encoder.STRING,
+  }, 4, ProtocolBuffer.Encoder.MAX_TYPE)
 
   _STYLE = """"""
   _STYLE_CONTENT_TYPE = """"""
@@ -572,36 +775,35 @@
       res+=prefix+">\n"
     return res
 
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
   kPreconditionGroup = 1
   kPreconditionkey = 2
   kPreconditionhash = 3
   kputs = 4
   kdeletes = 5
 
-  _TEXT = (
-   "ErrorCode",
-   "Precondition",
-   "key",
-   "hash",
-   "puts",
-   "deletes",
-  )
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    1: "Precondition",
+    2: "key",
+    3: "hash",
+    4: "puts",
+    5: "deletes",
+  }, 5)
 
-  _TYPES = (
-   ProtocolBuffer.Encoder.NUMERIC,
-   ProtocolBuffer.Encoder.STARTGROUP,
-
-   ProtocolBuffer.Encoder.STRING,
-
-   ProtocolBuffer.Encoder.STRING,
-
-   ProtocolBuffer.Encoder.STRING,
-
-   ProtocolBuffer.Encoder.STRING,
-
-  )
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    1: ProtocolBuffer.Encoder.STARTGROUP,
+    2: ProtocolBuffer.Encoder.STRING,
+    3: ProtocolBuffer.Encoder.STRING,
+    4: ProtocolBuffer.Encoder.STRING,
+    5: ProtocolBuffer.Encoder.STRING,
+  }, 5, ProtocolBuffer.Encoder.MAX_TYPE)
 
   _STYLE = """"""
   _STYLE_CONTENT_TYPE = """"""
 
-__all__ = ['Request','Response','TransactionRequest','TransactionRequest_Precondition']
+__all__ = ['Request','ApplicationError','Response','TransactionRequest','TransactionRequest_Precondition']
--- a/thirdparty/google_appengine/google/appengine/ext/remote_api/remote_api_stub.py	Mon Sep 07 20:26:39 2009 +0200
+++ b/thirdparty/google_appengine/google/appengine/ext/remote_api/remote_api_stub.py	Mon Sep 07 20:27:37 2009 +0200
@@ -71,6 +71,8 @@
 import threading
 import yaml
 
+from google.appengine.api import datastore
+from google.appengine.api import apiproxy_rpc
 from google.appengine.api import apiproxy_stub_map
 from google.appengine.datastore import datastore_pb
 from google.appengine.ext.remote_api import remote_api_pb
@@ -86,6 +88,10 @@
   """Exception for configuration errors."""
 
 
+class UnknownJavaServerError(Error):
+  """Exception for exceptions returned from a Java remote_api handler."""
+
+
 def GetUserAgent():
   """Determines the value of the 'User-agent' header to use for HTTP requests.
 
@@ -136,20 +142,41 @@
     self._server = server
     self._path = path
 
+  def _PreHookHandler(self, service, call, request, response):
+    pass
+
+  def _PostHookHandler(self, service, call, request, response):
+    pass
+
   def MakeSyncCall(self, service, call, request, response):
+    self._PreHookHandler(service, call, request, response)
     request_pb = remote_api_pb.Request()
     request_pb.set_service_name(service)
     request_pb.set_method(call)
     request_pb.mutable_request().set_contents(request.Encode())
 
     response_pb = remote_api_pb.Response()
-    response_pb.ParseFromString(self._server.Send(self._path,
-                                                  request_pb.Encode()))
+    encoded_request = request_pb.Encode()
+    encoded_response = self._server.Send(self._path, encoded_request)
+    response_pb.ParseFromString(encoded_response)
 
-    if response_pb.has_exception():
-      raise pickle.loads(response_pb.exception().contents())
-    else:
-      response.ParseFromString(response_pb.response().contents())
+    try:
+      if response_pb.has_application_error():
+        error_pb = response_pb.application_error()
+        raise datastore._ToDatastoreError(
+            apiproxy_errors.ApplicationError(error_pb.code(), error_pb.detail()))
+      elif response_pb.has_exception():
+        raise pickle.loads(response_pb.exception().contents())
+      elif response_pb.has_java_exception():
+        raise UnknownJavaServerError("An unknown error has occurred in the "
+                                     "Java remote_api handler for this call.")
+      else:
+        response.ParseFromString(response_pb.response().contents())
+    finally:
+      self._PostHookHandler(service, call, request, response)
+
+  def CreateRPC(self):
+    return apiproxy_rpc.RPC(stub=self)
 
 
 class RemoteDatastoreStub(RemoteStub):
@@ -192,10 +219,12 @@
       self.__next_local_cursor += 1
     finally:
       self.__local_cursor_lock.release()
+    query.clear_count()
     self.__queries[cursor_id] = query
 
     query_result.mutable_cursor().set_cursor(cursor_id)
     query_result.set_more_results(True)
+    query_result.set_keys_only(query.keys_only())
 
   def _Dynamic_Next(self, next_request, query_result):
     cursor = next_request.cursor().cursor()
@@ -214,6 +243,7 @@
       request.set_limit(min(request.limit(), next_request.count()))
     else:
       request.set_limit(next_request.count())
+    request.set_count(request.limit())
 
     super(RemoteDatastoreStub, self).MakeSyncCall(
         'remote_datastore', 'RunQuery', request, query_result)
@@ -229,8 +259,8 @@
     if get_request.has_transaction():
       txid = get_request.transaction().handle()
       txdata = self.__transactions[txid]
-      assert (txdata.thread_id == thread.get_ident(),
-              "Transactions are single-threaded.")
+      assert (txdata.thread_id ==
+          thread.get_ident()), "Transactions are single-threaded."
 
       keys = [(k, k.Encode()) for k in get_request.key_list()]
 
@@ -296,8 +326,8 @@
 
       txid = put_request.transaction().handle()
       txdata = self.__transactions[txid]
-      assert (txdata.thread_id == thread.get_ident(),
-              "Transactions are single-threaded.")
+      assert (txdata.thread_id ==
+          thread.get_ident()), "Transactions are single-threaded."
       for entity in entities:
         txdata.entities[entity.key().Encode()] = (entity.key(), entity)
         put_response.add_key().CopyFrom(entity.key())
@@ -309,8 +339,8 @@
     if delete_request.has_transaction():
       txid = delete_request.transaction().handle()
       txdata = self.__transactions[txid]
-      assert (txdata.thread_id == thread.get_ident(),
-              "Transactions are single-threaded.")
+      assert (txdata.thread_id ==
+          thread.get_ident()), "Transactions are single-threaded."
       for key in delete_request.key_list():
         txdata.entities[key.Encode()] = (key, None)
     else:
@@ -335,8 +365,8 @@
           'Transaction %d not found.' % (txid,))
 
     txdata = self.__transactions[txid]
-    assert (txdata.thread_id == thread.get_ident(),
-            "Transactions are single-threaded.")
+    assert (txdata.thread_id ==
+        thread.get_ident()), "Transactions are single-threaded."
     del self.__transactions[txid]
 
     tx = remote_api_pb.TransactionRequest()
@@ -367,8 +397,8 @@
             datastore_pb.Error.BAD_REQUEST,
             'Transaction %d not found.' % (txid,))
 
-      assert (txdata[txid].thread_id == thread.get_ident(),
-              "Transactions are single-threaded.")
+      assert (txdata[txid].thread_id ==
+          thread.get_ident()), "Transactions are single-threaded."
       del self.__transactions[txid]
     finally:
       self.__local_tx_lock.release()
@@ -386,14 +416,14 @@
         'The remote datastore does not support index manipulation.')
 
 
-def ConfigureRemoteDatastore(app_id,
-                             path,
-                             auth_func,
-                             servername=None,
-                             rpc_server_factory=appengine_rpc.HttpRpcServer,
-                             rtok=None,
-                             secure=False):
-  """Does necessary setup to allow easy remote access to an AppEngine datastore.
+def ConfigureRemoteApi(app_id,
+                       path,
+                       auth_func,
+                       servername=None,
+                       rpc_server_factory=appengine_rpc.HttpRpcServer,
+                       rtok=None,
+                       secure=False):
+  """Does necessary setup to allow easy remote access to App Engine APIs.
 
   Either servername must be provided or app_id must not be None.  If app_id
   is None and a servername is provided, this function will send a request
@@ -438,10 +468,32 @@
     if not app_info or 'rtok' not in app_info or 'app_id' not in app_info:
       raise ConfigurationError('Error parsing app_id lookup response')
     if app_info['rtok'] != rtok:
-      raise ConfigurationError('Token validation failed during app_id lookup.')
+      raise ConfigurationError('Token validation failed during app_id lookup. '
+                               '(sent %s, got %s)' % (repr(rtok),
+                                                      repr(app_info['rtok'])))
     app_id = app_info['app_id']
 
   os.environ['APPLICATION_ID'] = app_id
   apiproxy_stub_map.apiproxy = apiproxy_stub_map.APIProxyStubMap()
-  stub = RemoteDatastoreStub(server, path)
-  apiproxy_stub_map.apiproxy.RegisterStub('datastore_v3', stub)
+  datastore_stub = RemoteDatastoreStub(server, path)
+  apiproxy_stub_map.apiproxy.RegisterStub('datastore_v3', datastore_stub)
+  stub = RemoteStub(server, path)
+  for service in ['capability_service', 'images', 'mail', 'memcache',
+                  'urlfetch']:
+    apiproxy_stub_map.apiproxy.RegisterStub(service, stub)
+
+
+def MaybeInvokeAuthentication():
+  """Sends an empty request through to the configured end-point.
+
+  If authentication is necessary, this will cause the rpc_server to invoke
+  interactive authentication.
+  """
+  datastore_stub = apiproxy_stub_map.apiproxy.GetStub('datastore_v3')
+  if isinstance(datastore_stub, RemoteStub):
+    datastore_stub._server.Send(datastore_stub._path, payload=None)
+  else:
+    raise ConfigurationError('remote_api is not configured.')
+
+
+ConfigureRemoteDatastore = ConfigureRemoteApi
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/thirdparty/google_appengine/google/appengine/ext/remote_api/throttle.py	Mon Sep 07 20:27:37 2009 +0200
@@ -0,0 +1,637 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""Client-side transfer throttling for use with remote_api_stub.
+
+This module is used to configure rate limiting for programs accessing
+AppEngine services through remote_api.
+
+See the Throttle class for more information.
+
+An example with throttling:
+---
+from google.appengine.ext import db
+from google.appengine.ext.remote_api import remote_api_stub
+from google.appengine.ext.remote_api import throttle
+from myapp import models
+import getpass
+import threading
+
+def auth_func():
+  return (raw_input('Username:'), getpass.getpass('Password:'))
+
+remote_api_stub.ConfigureRemoteDatastore('my-app', '/remote_api', auth_func)
+full_throttle = throttle.DefaultThrottle(multiplier=1.0)
+throttle.ThrottleRemoteDatastore(full_throttle)
+
+# Register any threads that will be using the datastore with the throttler
+full_throttle.Register(threading.currentThread())
+
+# Now you can access the remote datastore just as if your code was running on
+# App Engine, and you don't need to worry about exceeding quota limits!
+
+houses = models.House.all().fetch(100)
+for a_house in houses:
+  a_house.doors += 1
+db.put(houses)
+---
+
+This example limits usage to the default free quota levels.  The multiplier
+kwarg to throttle.DefaultThrottle can be used to scale the throttle levels
+higher or lower.
+
+Throttles can also be constructed directly for more control over the limits
+for different operations.  See the Throttle class and the constants following
+it for details.
+"""
+
+
+import logging
+import threading
+import time
+import urllib2
+import urlparse
+
+from google.appengine.api import apiproxy_stub_map
+from google.appengine.ext.remote_api import remote_api_stub
+from google.appengine.tools import appengine_rpc
+
+logger = logging.getLogger('google.appengine.ext.remote_api.throttle')
+
+MINIMUM_THROTTLE_SLEEP_DURATION = 0.001
+
+
+class Error(Exception):
+  """Base class for errors in this module."""
+
+
+class ThreadNotRegisteredError(Error):
+  """An unregistered thread has accessed the throttled datastore stub."""
+
+
+class UnknownThrottleNameError(Error):
+  """A transfer was added for an unknown throttle name."""
+
+
+def InterruptibleSleep(sleep_time):
+  """Puts thread to sleep, checking this thread's exit_flag four times a second.
+
+  Args:
+    sleep_time: Time to sleep.
+  """
+  slept = 0.0
+  epsilon = .0001
+  thread = threading.currentThread()
+  while slept < sleep_time - epsilon:
+    remaining = sleep_time - slept
+    this_sleep_time = min(remaining, 0.25)
+    time.sleep(this_sleep_time)
+    slept += this_sleep_time
+    if hasattr(thread, 'exit_flag') and thread.exit_flag:
+      return
+
+
+class Throttle(object):
+  """A base class for upload rate throttling.
+
+  Transferring large number of entities, too quickly, could trigger
+  quota limits and cause the transfer process to halt.  In order to
+  stay within the application's quota, we throttle the data transfer
+  to a specified limit (across all transfer threads).
+
+  This class tracks a moving average of some aspect of the transfer
+  rate (bandwidth, records per second, http connections per
+  second). It keeps two windows of counts of bytes transferred, on a
+  per-thread basis. One block is the "current" block, and the other is
+  the "prior" block. It will rotate the counts from current to prior
+  when ROTATE_PERIOD has passed.  Thus, the current block will
+  represent from 0 seconds to ROTATE_PERIOD seconds of activity
+  (determined by: time.time() - self.last_rotate).  The prior block
+  will always represent a full ROTATE_PERIOD.
+
+  Sleeping is performed just before a transfer of another block, and is
+  based on the counts transferred *before* the next transfer. It really
+  does not matter how much will be transferred, but only that for all the
+  data transferred SO FAR that we have interspersed enough pauses to
+  ensure the aggregate transfer rate is within the specified limit.
+
+  These counts are maintained on a per-thread basis, so we do not require
+  any interlocks around incrementing the counts. There IS an interlock on
+  the rotation of the counts because we do not want multiple threads to
+  multiply-rotate the counts.
+
+  There are various race conditions in the computation and collection
+  of these counts. We do not require precise values, but simply to
+  keep the overall transfer within the bandwidth limits. If a given
+  pause is a little short, or a little long, then the aggregate delays
+  will be correct.
+  """
+
+  ROTATE_PERIOD = 600
+
+  def __init__(self,
+               get_time=time.time,
+               thread_sleep=InterruptibleSleep,
+               layout=None):
+    self.get_time = get_time
+    self.thread_sleep = thread_sleep
+
+    self.start_time = get_time()
+    self.transferred = {}
+    self.prior_block = {}
+    self.totals = {}
+    self.throttles = {}
+
+    self.last_rotate = {}
+    self.rotate_mutex = {}
+    if layout:
+      self.AddThrottles(layout)
+
+  def AddThrottle(self, name, limit):
+    self.throttles[name] = limit
+    self.transferred[name] = {}
+    self.prior_block[name] = {}
+    self.totals[name] = {}
+    self.last_rotate[name] = self.get_time()
+    self.rotate_mutex[name] = threading.Lock()
+
+  def AddThrottles(self, layout):
+    for key, value in layout.iteritems():
+      self.AddThrottle(key, value)
+
+  def Register(self, thread):
+    """Register this thread with the throttler."""
+    thread_id = id(thread)
+    for throttle_name in self.throttles.iterkeys():
+      self.transferred[throttle_name][thread_id] = 0
+      self.prior_block[throttle_name][thread_id] = 0
+      self.totals[throttle_name][thread_id] = 0
+
+  def VerifyThrottleName(self, throttle_name):
+    if throttle_name not in self.throttles:
+      raise UnknownThrottleNameError('%s is not a registered throttle' %
+                                     throttle_name)
+
+  def AddTransfer(self, throttle_name, token_count):
+    """Add a count to the amount this thread has transferred.
+
+    Each time a thread transfers some data, it should call this method to
+    note the amount sent. The counts may be rotated if sufficient time
+    has passed since the last rotation.
+
+    Args:
+      throttle_name: The name of the throttle to add to.
+      token_count: The number to add to the throttle counter.
+    """
+    self.VerifyThrottleName(throttle_name)
+    transferred = self.transferred[throttle_name]
+    try:
+      transferred[id(threading.currentThread())] += token_count
+    except KeyError:
+      thread = threading.currentThread()
+      raise ThreadNotRegisteredError(
+          'Unregistered thread accessing throttled datastore stub: id = %s\n'
+          'name = %s' % (id(thread), thread.getName()))
+
+    if self.last_rotate[throttle_name] + self.ROTATE_PERIOD < self.get_time():
+      self._RotateCounts(throttle_name)
+
+  def Sleep(self, throttle_name=None):
+    """Possibly sleep in order to limit the transfer rate.
+
+    Note that we sleep based on *prior* transfers rather than what we
+    may be about to transfer. The next transfer could put us under/over
+    and that will be rectified *after* that transfer. Net result is that
+    the average transfer rate will remain within bounds. Spiky behavior
+    or uneven rates among the threads could possibly bring the transfer
+    rate above the requested limit for short durations.
+
+    Args:
+      throttle_name: The name of the throttle to sleep on.  If None or
+        omitted, then sleep on all throttles.
+    """
+    if throttle_name is None:
+      for throttle_name in self.throttles:
+        self.Sleep(throttle_name=throttle_name)
+      return
+
+    self.VerifyThrottleName(throttle_name)
+
+    thread = threading.currentThread()
+
+    while True:
+      duration = self.get_time() - self.last_rotate[throttle_name]
+
+      total = 0
+      for count in self.prior_block[throttle_name].values():
+        total += count
+
+      if total:
+        duration += self.ROTATE_PERIOD
+
+      for count in self.transferred[throttle_name].values():
+        total += count
+
+      sleep_time = self._SleepTime(total, self.throttles[throttle_name],
+                                   duration)
+
+      if sleep_time < MINIMUM_THROTTLE_SLEEP_DURATION:
+        break
+
+      logger.debug('[%s] Throttling on %s. Sleeping for %.1f ms '
+                   '(duration=%.1f ms, total=%d)',
+                   thread.getName(), throttle_name,
+                   sleep_time * 1000, duration * 1000, total)
+      self.thread_sleep(sleep_time)
+      if thread.exit_flag:
+        break
+      self._RotateCounts(throttle_name)
+
+  def _SleepTime(self, total, limit, duration):
+    """Calculate the time to sleep on a throttle.
+
+    Args:
+      total: The total amount transferred.
+      limit: The amount per second that is allowed to be sent.
+      duration: The amount of time taken to send the total.
+
+    Returns:
+      A float for the amount of time to sleep.
+    """
+    if not limit:
+      return 0.0
+    return max(0.0, (total / limit) - duration)
+
+  def _RotateCounts(self, throttle_name):
+    """Rotate the transfer counters.
+
+    If sufficient time has passed, then rotate the counters from active to
+    the prior-block of counts.
+
+    This rotation is interlocked to ensure that multiple threads do not
+    over-rotate the counts.
+
+    Args:
+      throttle_name: The name of the throttle to rotate.
+    """
+    self.VerifyThrottleName(throttle_name)
+    self.rotate_mutex[throttle_name].acquire()
+    try:
+      next_rotate_time = self.last_rotate[throttle_name] + self.ROTATE_PERIOD
+      if next_rotate_time >= self.get_time():
+        return
+
+      for name, count in self.transferred[throttle_name].items():
+
+
+        self.prior_block[throttle_name][name] = count
+        self.transferred[throttle_name][name] = 0
+
+        self.totals[throttle_name][name] += count
+
+      self.last_rotate[throttle_name] = self.get_time()
+
+    finally:
+      self.rotate_mutex[throttle_name].release()
+
+  def TotalTransferred(self, throttle_name):
+    """Return the total transferred, and over what period.
+
+    Args:
+      throttle_name: The name of the throttle to total.
+
+    Returns:
+      A tuple of the total count and running time for the given throttle name.
+    """
+    total = 0
+    for count in self.totals[throttle_name].values():
+      total += count
+    for count in self.transferred[throttle_name].values():
+      total += count
+    return total, self.get_time() - self.start_time
+
+
+BANDWIDTH_UP = 'http-bandwidth-up'
+BANDWIDTH_DOWN = 'http-bandwidth-down'
+REQUESTS = 'http-requests'
+HTTPS_BANDWIDTH_UP = 'https-bandwidth-up'
+HTTPS_BANDWIDTH_DOWN = 'https-bandwidth-down'
+HTTPS_REQUESTS = 'https-requests'
+DATASTORE_CALL_COUNT = 'datastore-call-count'
+ENTITIES_FETCHED = 'entities-fetched'
+ENTITIES_MODIFIED = 'entities-modified'
+INDEX_MODIFICATIONS = 'index-modifications'
+
+
+DEFAULT_LIMITS = {
+    BANDWIDTH_UP: 100000,
+    BANDWIDTH_DOWN: 100000,
+    REQUESTS: 15,
+    HTTPS_BANDWIDTH_UP: 100000,
+    HTTPS_BANDWIDTH_DOWN: 100000,
+    HTTPS_REQUESTS: 15,
+    DATASTORE_CALL_COUNT: 120,
+    ENTITIES_FETCHED: 400,
+    ENTITIES_MODIFIED: 400,
+    INDEX_MODIFICATIONS: 1600,
+}
+
+NO_LIMITS = {
+    BANDWIDTH_UP: None,
+    BANDWIDTH_DOWN: None,
+    REQUESTS: None,
+    HTTPS_BANDWIDTH_UP: None,
+    HTTPS_BANDWIDTH_DOWN: None,
+    HTTPS_REQUESTS: None,
+    DATASTORE_CALL_COUNT: None,
+    ENTITIES_FETCHED: None,
+    ENTITIES_MODIFIED: None,
+    INDEX_MODIFICATIONS: None,
+}
+
+
+def DefaultThrottle(multiplier=1.0):
+  """Return a Throttle instance with multiplier * the quota limits."""
+  layout = dict([(name, multiplier * limit)
+                 for (name, limit) in DEFAULT_LIMITS.iteritems()])
+  return Throttle(layout=layout)
+
+
+class ThrottleHandler(urllib2.BaseHandler):
+  """A urllib2 handler for http and https requests that adds to a throttle."""
+
+  def __init__(self, throttle):
+    """Initialize a ThrottleHandler.
+
+    Args:
+      throttle: A Throttle instance to call for bandwidth and http/https request
+        throttling.
+    """
+    self.throttle = throttle
+
+  def AddRequest(self, throttle_name, req):
+    """Add to bandwidth throttle for given request.
+
+    Args:
+      throttle_name: The name of the bandwidth throttle to add to.
+      req: The request whose size will be added to the throttle.
+    """
+    size = 0
+    for key, value in req.headers.iteritems():
+      size += len('%s: %s\n' % (key, value))
+    for key, value in req.unredirected_hdrs.iteritems():
+      size += len('%s: %s\n' % (key, value))
+    (unused_scheme,
+     unused_host_port, url_path,
+     unused_query, unused_fragment) = urlparse.urlsplit(req.get_full_url())
+    size += len('%s %s HTTP/1.1\n' % (req.get_method(), url_path))
+    data = req.get_data()
+    if data:
+      size += len(data)
+    self.throttle.AddTransfer(throttle_name, size)
+
+  def AddResponse(self, throttle_name, res):
+    """Add to bandwidth throttle for given response.
+
+    Args:
+      throttle_name: The name of the bandwidth throttle to add to.
+      res: The response whose size will be added to the throttle.
+    """
+    content = res.read()
+
+    def ReturnContent():
+      return content
+
+    res.read = ReturnContent
+    size = len(content)
+    headers = res.info()
+    for key, value in headers.items():
+      size += len('%s: %s\n' % (key, value))
+    self.throttle.AddTransfer(throttle_name, size)
+
+  def http_request(self, req):
+    """Process an HTTP request.
+
+    If the throttle is over quota, sleep first.  Then add request size to
+    throttle before returning it to be sent.
+
+    Args:
+      req: A urllib2.Request object.
+
+    Returns:
+      The request passed in.
+    """
+    self.throttle.Sleep(BANDWIDTH_UP)
+    self.throttle.Sleep(BANDWIDTH_DOWN)
+    self.AddRequest(BANDWIDTH_UP, req)
+    return req
+
+  def https_request(self, req):
+    """Process an HTTPS request.
+
+    If the throttle is over quota, sleep first.  Then add request size to
+    throttle before returning it to be sent.
+
+    Args:
+      req: A urllib2.Request object.
+
+    Returns:
+      The request passed in.
+    """
+    self.throttle.Sleep(HTTPS_BANDWIDTH_UP)
+    self.throttle.Sleep(HTTPS_BANDWIDTH_DOWN)
+    self.AddRequest(HTTPS_BANDWIDTH_UP, req)
+    return req
+
+  def http_response(self, unused_req, res):
+    """Process an HTTP response.
+
+    The size of the response is added to the bandwidth throttle and the request
+    throttle is incremented by one.
+
+    Args:
+      unused_req: The urllib2 request for this response.
+      res: A urllib2 response object.
+
+    Returns:
+      The response passed in.
+    """
+    self.AddResponse(BANDWIDTH_DOWN, res)
+    self.throttle.AddTransfer(REQUESTS, 1)
+    return res
+
+  def https_response(self, unused_req, res):
+    """Process an HTTPS response.
+
+    The size of the response is added to the bandwidth throttle and the request
+    throttle is incremented by one.
+
+    Args:
+      unused_req: The urllib2 request for this response.
+      res: A urllib2 response object.
+
+    Returns:
+      The response passed in.
+    """
+    self.AddResponse(HTTPS_BANDWIDTH_DOWN, res)
+    self.throttle.AddTransfer(HTTPS_REQUESTS, 1)
+    return res
+
+
+class ThrottledHttpRpcServer(appengine_rpc.HttpRpcServer):
+  """Provides a simplified RPC-style interface for HTTP requests.
+
+  This RPC server uses a Throttle to prevent exceeding quotas.
+  """
+
+  def __init__(self, throttle, *args, **kwargs):
+    """Initialize a ThrottledHttpRpcServer.
+
+    Also sets request_manager.rpc_server to the ThrottledHttpRpcServer instance.
+
+    Args:
+      throttle: A Throttle instance.
+      args: Positional arguments to pass through to
+        appengine_rpc.HttpRpcServer.__init__
+      kwargs: Keyword arguments to pass through to
+        appengine_rpc.HttpRpcServer.__init__
+    """
+    self.throttle = throttle
+    appengine_rpc.HttpRpcServer.__init__(self, *args, **kwargs)
+
+  def _GetOpener(self):
+    """Returns an OpenerDirector that supports cookies and ignores redirects.
+
+    Returns:
+      A urllib2.OpenerDirector object.
+    """
+    opener = appengine_rpc.HttpRpcServer._GetOpener(self)
+    opener.add_handler(ThrottleHandler(self.throttle))
+
+    return opener
+
+
+def ThrottledHttpRpcServerFactory(throttle):
+  """Create a factory to produce ThrottledHttpRpcServer for a given throttle.
+
+  Args:
+    throttle: A Throttle instance to use for the ThrottledHttpRpcServer.
+
+  Returns:
+    A factory to produce a ThrottledHttpRpcServer.
+  """
+
+  def MakeRpcServer(*args, **kwargs):
+    """Factory to produce a ThrottledHttpRpcServer.
+
+    Args:
+      args: Positional args to pass to ThrottledHttpRpcServer.
+      kwargs: Keyword args to pass to ThrottledHttpRpcServer.
+
+    Returns:
+      A ThrottledHttpRpcServer instance.
+    """
+    kwargs['account_type'] = 'HOSTED_OR_GOOGLE'
+    kwargs['save_cookies'] = True
+    rpc_server = ThrottledHttpRpcServer(throttle, *args, **kwargs)
+    return rpc_server
+  return MakeRpcServer
+
+
+class Throttler(object):
+  def PrehookHandler(self, service, call, request, response):
+    handler = getattr(self, '_Prehook_' + call, None)
+    if handler:
+      handler(request, response)
+
+  def PosthookHandler(self, service, call, request, response):
+    handler = getattr(self, '_Posthook_' + call, None)
+    if handler:
+      handler(request, response)
+
+
+def SleepHandler(*throttle_names):
+  def SleepOnThrottles(self, request, response):
+    for throttle_name in throttle_names:
+      self._DatastoreThrottler__throttle.Sleep(throttle_name)
+  return SleepOnThrottles
+
+
+class DatastoreThrottler(Throttler):
+  def __init__(self, throttle):
+    Throttler.__init__(self)
+    self.__throttle = throttle
+
+  def AddCost(self, cost_proto):
+    """Add costs from the Cost protobuf."""
+    self.__throttle.AddTransfer(INDEX_MODIFICATIONS, cost_proto.index_writes())
+    self.__throttle.AddTransfer(ENTITIES_MODIFIED, cost_proto.entity_writes())
+
+
+  _Prehook_Put = SleepHandler(ENTITIES_MODIFIED, INDEX_MODIFICATIONS)
+
+  def _Posthook_Put(self, request, response):
+    self.AddCost(response.cost())
+
+
+  _Prehook_Get = SleepHandler(ENTITIES_FETCHED)
+
+  def _Posthook_Get(self, request, response):
+    self.__throttle.AddTransfer(ENTITIES_FETCHED, response.entity_size())
+
+
+  _Prehook_RunQuery = SleepHandler(ENTITIES_FETCHED)
+
+  def _Posthook_RunQuery(self, request, response):
+    if not response.keys_only():
+      self.__throttle.AddTransfer(ENTITIES_FETCHED, response.result_size())
+
+
+  _Prehook_Next = SleepHandler(ENTITIES_FETCHED)
+
+  def _Posthook_Next(self, request, response):
+    if not response.keys_only():
+      self.__throttle.AddTransfer(ENTITIES_FETCHED, response.result_size())
+
+
+  _Prehook_Delete = SleepHandler(ENTITIES_MODIFIED, INDEX_MODIFICATIONS)
+
+  def _Posthook_Delete(self, request, response):
+    self.AddCost(response.cost())
+
+
+  _Prehook_Commit = SleepHandler()
+
+  def _Posthook_Commit(self, request, response):
+    self.AddCost(response.cost())
+
+
+def ThrottleRemoteDatastore(throttle, remote_datastore_stub=None):
+  """Install the given throttle for the remote datastore stub.
+
+  Args:
+    throttle: A Throttle instance to limit datastore access rates
+    remote_datastore_stub: The datastore stub instance to throttle, for
+      testing purposes.
+  """
+  if not remote_datastore_stub:
+    remote_datastore_stub = apiproxy_stub_map.apiproxy.GetStub('datastore_v3')
+  if not isinstance(remote_datastore_stub, remote_api_stub.RemoteDatastoreStub):
+    raise remote_api_stub.ConfigurationError('remote_api is not configured.')
+  throttler = DatastoreThrottler(throttle)
+  remote_datastore_stub._PreHookHandler = throttler.PrehookHandler
+  remote_datastore_stub._PostHookHandler = throttler.PosthookHandler
--- a/thirdparty/google_appengine/google/appengine/ext/search/__init__.py	Mon Sep 07 20:26:39 2009 +0200
+++ b/thirdparty/google_appengine/google/appengine/ext/search/__init__.py	Mon Sep 07 20:27:37 2009 +0200
@@ -46,6 +46,40 @@
 
 The full text index is stored in a property named __searchable_text_index.
 
+Specifying multiple indexes and properties to index
+---------------------------------------------------
+
+By default, one index is created with all string properties. You can define
+multiple indexes and specify which properties should be indexed for each by
+overriding SearchableProperties() method of model.SearchableModel, for example:
+
+  class Article(search.SearchableModel):
+    @classmethod
+    def SearchableProperties(cls):
+      return [['book', 'author'], ['book']]
+
+In this example, two indexes will be maintained - one that includes 'book' and
+'author' properties, and another one for 'book' property only. They will be
+stored in properties named __searchable_text_index_book_author and
+__searchable_text_index_book respectively. Note that the index that includes
+all properties will not be created unless added explicitly like this:
+
+  @classmethod
+  def SearchableProperties(cls):
+    return [['book', 'author'], ['book'], search.ALL_PROPERTIES]
+
+The default return value of SearchableProperties() is [search.ALL_PROPERTIES]
+(one index, all properties).
+
+To search using a custom-defined index, pass its definition
+in 'properties' parameter of 'search':
+
+  Article.all().search('Lem', properties=['book', 'author'])
+
+Note that the order of properties in the list matters.
+
+Adding indexes to index.yaml
+----------------------------
 
 In general, if you just want to provide full text search, you *don't* need to
 add any extra indexes to your index.yaml. However, if you want to use search()
@@ -60,6 +94,9 @@
       direction: desc
     ...
 
+Similarly, if you created a custom index (see above), use the name of the
+property it's stored in, e.g. __searchable_text_index_book_author.
+
 Note that using SearchableModel will noticeable increase the latency of save()
 operations, since it writes an index row for each indexable word. This also
 means that the latency of save() will increase roughly with the size of the
@@ -79,6 +116,8 @@
 from google.appengine.ext import db
 from google.appengine.datastore import datastore_pb
 
+ALL_PROPERTIES = []
+
 class SearchableEntity(datastore.Entity):
   """A subclass of datastore.Entity that supports full text indexing.
 
@@ -124,6 +163,8 @@
 
   _word_delimiter_regex = re.compile('[' + re.escape(string.punctuation) + ']')
 
+  _searchable_properties = [ALL_PROPERTIES]
+
   def __init__(self, kind_or_entity, word_delimiter_regex=None, *args,
                **kwargs):
     """Constructor. May be called as a copy constructor.
@@ -144,6 +185,9 @@
     if isinstance(kind_or_entity, datastore.Entity):
       self._Entity__key = kind_or_entity._Entity__key
       self._Entity__unindexed_properties = frozenset(kind_or_entity.unindexed_properties())
+      if isinstance(kind_or_entity, SearchableEntity):
+        if getattr(kind_or_entity, '_searchable_properties', None) is not None:
+          self._searchable_properties = kind_or_entity._searchable_properties
       self.update(kind_or_entity)
     else:
       super(SearchableEntity, self).__init__(kind_or_entity, *args, **kwargs)
@@ -154,22 +198,33 @@
     Returns:
       entity_pb.Entity
     """
-    if SearchableEntity._FULL_TEXT_INDEX_PROPERTY in self:
-      del self[SearchableEntity._FULL_TEXT_INDEX_PROPERTY]
+    for properties_to_index in self._searchable_properties:
+      index_property_name = SearchableEntity.IndexPropertyName(properties_to_index)
+      if index_property_name in self:
+        del self[index_property_name]
+
+
+      if not properties_to_index:
+        properties_to_index = self.keys()
+
+      index = set()
+      for name in properties_to_index:
+        if not self.has_key(name):
+          continue
 
-    index = set()
-    for (name, values) in self.items():
-      if not isinstance(values, list):
-        values = [values]
-      if (isinstance(values[0], basestring) and
-          not isinstance(values[0], datastore_types.Blob)):
-        for value in values:
-          index.update(SearchableEntity._FullTextIndex(
-              value, self._word_delimiter_regex))
+        values = self[name]
+        if not isinstance(values, list):
+          values = [values]
 
-    index_list = list(index)
-    if index_list:
-      self[SearchableEntity._FULL_TEXT_INDEX_PROPERTY] = index_list
+        if (isinstance(values[0], basestring) and
+            not isinstance(values[0], datastore_types.Blob)):
+          for value in values:
+            index.update(SearchableEntity._FullTextIndex(
+                value, self._word_delimiter_regex))
+
+      index_list = list(index)
+      if index_list:
+        self[index_property_name] = index_list
 
     return super(SearchableEntity, self)._ToPb()
 
@@ -206,6 +261,16 @@
 
     return words
 
+  @classmethod
+  def IndexPropertyName(cls, properties):
+    """Given index definition, returns the name of the property to put it in."""
+    name = SearchableEntity._FULL_TEXT_INDEX_PROPERTY
+
+    if properties:
+      name += '_' + '_'.join(properties)
+
+    return name
+
 
 class SearchableQuery(datastore.Query):
   """A subclass of datastore.Query that supports full text search.
@@ -214,7 +279,8 @@
   SearchableEntity or SearchableModel classes.
   """
 
-  def Search(self, search_query, word_delimiter_regex=None):
+  def Search(self, search_query, word_delimiter_regex=None,
+             properties=ALL_PROPERTIES):
     """Add a search query. This may be combined with filters.
 
     Note that keywords in the search query will be silently dropped if they
@@ -230,28 +296,27 @@
     datastore_types.ValidateString(search_query, 'search query')
     self._search_query = search_query
     self._word_delimiter_regex = word_delimiter_regex
+    self._properties = properties
     return self
 
-  def _ToPb(self, limit=None, offset=None):
+  def _ToPb(self, *args, **kwds):
     """Adds filters for the search query, then delegates to the superclass.
 
-    Raises BadFilterError if a filter on the index property already exists.
-
-    Args:
-      # an upper bound on the number of results returned by the query.
-      limit: int
-      # number of results that match the query to skip.  limit is applied
-      # after the offset is fulfilled.
-      offset: int
+    Mimics Query._ToPb()'s signature. Raises BadFilterError if a filter on the
+    index property already exists.
 
     Returns:
       datastore_pb.Query
     """
-    if SearchableEntity._FULL_TEXT_INDEX_PROPERTY in self:
+
+    properties = getattr(self, "_properties", ALL_PROPERTIES)
+
+    index_property_name = SearchableEntity.IndexPropertyName(properties)
+    if index_property_name in self:
       raise datastore_errors.BadFilterError(
-        '%s is a reserved name.' % SearchableEntity._FULL_TEXT_INDEX_PROPERTY)
+        '%s is a reserved name.' % index_property_name)
 
-    pb = super(SearchableQuery, self)._ToPb(limit=limit, offset=offset)
+    pb = super(SearchableQuery, self)._ToPb(*args, **kwds)
 
     if hasattr(self, '_search_query'):
       keywords = SearchableEntity._FullTextIndex(
@@ -260,7 +325,7 @@
         filter = pb.add_filter()
         filter.set_op(datastore_pb.Query_Filter.EQUAL)
         prop = filter.add_property()
-        prop.set_name(SearchableEntity._FULL_TEXT_INDEX_PROPERTY)
+        prop.set_name(index_property_name)
         prop.set_multiple(len(keywords) > 1)
         prop.mutable_value().set_stringvalue(unicode(keyword).encode('utf-8'))
 
@@ -290,13 +355,21 @@
 
   Automatically indexes all string-based properties. To search, use the all()
   method to get a SearchableModel.Query, then use its search() method.
+
+  Override SearchableProperties() to define properties to index and/or multiple
+  indexes (see the file's comment).
   """
 
+  @classmethod
+  def SearchableProperties(cls):
+    return [ALL_PROPERTIES]
+
   class Query(db.Query):
     """A subclass of db.Query that supports full text search."""
     _search_query = None
+    _properties = None
 
-    def search(self, search_query):
+    def search(self, search_query, properties=ALL_PROPERTIES):
       """Adds a full text search to this query.
 
       Args:
@@ -306,6 +379,13 @@
         self
       """
       self._search_query = search_query
+      self._properties = properties
+
+      if self._properties not in getattr(self, '_searchable_properties', [ALL_PROPERTIES]):
+        raise datastore_errors.BadFilterError(
+          '%s does not have a corresponding index. Please add it to '
+          'the SEARCHABLE_PROPERTIES list' % self._properties)
+
       return self
 
     def _get_query(self):
@@ -314,14 +394,16 @@
                                   _query_class=SearchableQuery,
                                   _multi_query_class=SearchableMultiQuery)
       if self._search_query:
-        query.Search(self._search_query)
+        query.Search(self._search_query, properties=self._properties)
       return query
 
   def _populate_internal_entity(self):
     """Wraps db.Model._populate_internal_entity() and injects
     SearchableEntity."""
-    return db.Model._populate_internal_entity(self,
-                                              _entity_class=SearchableEntity)
+    entity = db.Model._populate_internal_entity(self,
+                                                _entity_class=SearchableEntity)
+    entity._searchable_properties = self.SearchableProperties()
+    return entity
 
   @classmethod
   def from_entity(cls, entity):
@@ -333,4 +415,6 @@
   @classmethod
   def all(cls):
     """Returns a SearchableModel.Query for this kind."""
-    return SearchableModel.Query(cls)
+    query = SearchableModel.Query(cls)
+    query._searchable_properties = cls.SearchableProperties()
+    return query
--- a/thirdparty/google_appengine/google/appengine/ext/webapp/__init__.py	Mon Sep 07 20:26:39 2009 +0200
+++ b/thirdparty/google_appengine/google/appengine/ext/webapp/__init__.py	Mon Sep 07 20:27:37 2009 +0200
@@ -247,6 +247,9 @@
       except UnicodeError, e:
         logging.warning('Response written is not UTF-8: %s', e)
 
+    if (self.headers.get('Cache-Control') == 'no-cache' and
+        not self.headers.get('Expires')):
+      self.headers['Expires'] = 'Fri, 01 Jan 1990 00:00:00 GMT'
     self.headers['Content-Length'] = str(len(body))
     write = start_response('%d %s' % self.__status, self.__wsgi_headers)
     write(body)
@@ -463,6 +466,9 @@
   The URL mapping is first-match based on the list ordering.
   """
 
+  REQUEST_CLASS = Request
+  RESPONSE_CLASS = Response
+
   def __init__(self, url_mapping, debug=False):
     """Initializes this application with the given URL mapping.
 
@@ -477,8 +483,8 @@
 
   def __call__(self, environ, start_response):
     """Called by WSGI when a request comes in."""
-    request = Request(environ)
-    response = Response()
+    request = self.REQUEST_CLASS(environ)
+    response = self.RESPONSE_CLASS()
 
     WSGIApplication.active_instance = self
 
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/thirdparty/google_appengine/google/appengine/ext/webapp/mail_handlers.py	Mon Sep 07 20:27:37 2009 +0200
@@ -0,0 +1,78 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""Handler library for inbound Mail API.
+
+Contains handlers to help with receiving mail.
+
+  InboundMailHandler: Has helper method for easily setting up
+    email receivers.
+"""
+
+
+
+
+
+from google.appengine.api import mail
+from google.appengine.ext import webapp
+
+
+MAIL_HANDLER_URL_PATTERN = '/_ah/mail/.+'
+
+
+class InboundMailHandler(webapp.RequestHandler):
+  """Base class for inbound mail handlers.
+
+  Example:
+
+    # Sub-class overrides receive method.
+    class HelloReceiver(InboundMailHandler):
+
+      def receive(self, mail_message):
+        logging.info('Received greeting from %s: %s' % (mail_message.sender,
+                                                        mail_message.body))
+
+
+    # Map mail handler to application.
+    application = webapp.WSGIApplication([
+        HelloReceiver.mapping(),
+    ])
+  """
+
+  def post(self):
+    """Transforms body to email request."""
+    self.receive(mail.InboundEmailMessage(self.request.body))
+
+  def receive(self, mail_message):
+    """Receive an email message.
+
+    Override this method to implement an email receiver.
+
+    Args:
+      mail_message: InboundEmailMessage instance representing received
+        email.
+    """
+    pass
+
+  @classmethod
+  def mapping(cls):
+    """Convenience method to map handler class to application.
+
+    Returns:
+      Mapping from email URL to inbound mail handler class.
+    """
+    return MAIL_HANDLER_URL_PATTERN, cls
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/thirdparty/google_appengine/google/appengine/ext/webapp/xmpp_handlers.py	Mon Sep 07 20:27:37 2009 +0200
@@ -0,0 +1,119 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""XMPP webapp handler classes.
+
+This module provides handler classes for XMPP bots, including both basic
+messaging functionality and a command handler for commands such as "/foo bar"
+"""
+
+
+
+import logging
+from google.appengine.api import xmpp
+from google.appengine.ext import webapp
+
+
+class BaseHandler(webapp.RequestHandler):
+  """A webapp baseclass for XMPP handlers.
+
+  Implements a straightforward message delivery pattern. When a message is
+  received, message_received is called with a Message object that encapsulates
+  the relevant details. Users can reply using the standard XMPP API, or the
+  convenient .reply() method on the Message object.
+  """
+
+  def message_received(self, message):
+    """Called when a message is sent to the XMPP bot.
+
+    Args:
+      message: Message: The message that was sent by the user.
+    """
+    raise NotImplementedError()
+
+  def handle_exception(self, exception, debug_mode):
+    """Called if this handler throws an exception during execution.
+
+    Args:
+      exception: the exception that was thrown
+      debug_mode: True if the web application is running in debug mode
+    """
+    super(BaseHandler, self).handle_exception(exception, debug_mode)
+    if self.xmpp_message:
+      self.xmpp_message.reply('Oops. Something went wrong.')
+
+  def post(self):
+    try:
+      self.xmpp_message = xmpp.Message(self.request.POST)
+    except xmpp.InvalidMessageError, e:
+      logging.error("Invalid XMPP request: Missing required field %s", e[0])
+      return
+    self.message_received(self.xmpp_message)
+
+
+class CommandHandlerMixin(object):
+  """A command handler for XMPP bots.
+
+  Implements a command handler pattern. XMPP messages are processed by calling
+  message_received. Message objects handled by this class are annotated with
+  'command' and 'arg' fields. On receipt of a message starting with a forward
+  or backward slash, the handler calls a method named after the command - eg,
+  if the user sends "/foo bar", the handler will call foo_command(message).
+  If no handler method matches, unhandled_command is called. The default behaviour
+  of unhandled_command is to send the message "Unknown command" back to
+  the sender.
+
+  If the user sends a message not prefixed with a slash,
+  text_message(message) is called.
+  """
+
+  def unhandled_command(self, message):
+    """Called when an unknown command is sent to the XMPP bot.
+
+    Args:
+      message: Message: The message that was sent by the user.
+    """
+    message.reply('Unknown command')
+
+  def text_message(self, message):
+    """Called when a message not prefixed by a /command is sent to the XMPP bot.
+
+    Args:
+      message: Message: The message that was sent by the user.
+    """
+    pass
+
+  def message_received(self, message):
+    """Called when a message is sent to the XMPP bot.
+
+    Args:
+      message: Message: The message that was sent by the user.
+    """
+    if message.command:
+      handler_name = '%s_command' % (message.command,)
+      handler = getattr(self, handler_name, None)
+      if handler:
+        handler(message)
+      else:
+        self.unhandled_command(message)
+    else:
+      self.text_message(message)
+
+
+class CommandHandler(CommandHandlerMixin, BaseHandler):
+  """A webapp implementation of CommandHandlerMixin."""
+  pass
--- a/thirdparty/google_appengine/google/appengine/runtime/apiproxy.py	Mon Sep 07 20:26:39 2009 +0200
+++ b/thirdparty/google_appengine/google/appengine/runtime/apiproxy.py	Mon Sep 07 20:27:37 2009 +0200
@@ -131,6 +131,7 @@
 
   def __MakeCallDone(self):
     self.__state = RPC.FINISHING
+    self.cpu_usage_mcycles = self.__result_dict['cpu_usage_mcycles']
     if self.__result_dict['error'] == APPLICATION_ERROR:
       self.__exception = apiproxy_errors.ApplicationError(
           self.__result_dict['application_error'],
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/thirdparty/google_appengine/google/appengine/tools/adaptive_thread_pool.py	Mon Sep 07 20:27:37 2009 +0200
@@ -0,0 +1,460 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""Provides thread-pool-like functionality for workers accessing App Engine.
+
+The pool adapts to slow or timing out requests by reducing the number of
+active workers, or increasing the number when requests latency reduces.
+"""
+
+
+
+import logging
+import Queue
+import sys
+import threading
+import time
+import traceback
+
+from google.appengine.tools.requeue import ReQueue
+
+logger = logging.getLogger('google.appengine.tools.adaptive_thread_pool')
+
+_THREAD_SHOULD_EXIT = '_THREAD_SHOULD_EXIT'
+
+INITIAL_BACKOFF = 1.0
+
+BACKOFF_FACTOR = 2.0
+
+
+class Error(Exception):
+  """Base-class for exceptions in this module."""
+
+
+class WorkItemError(Error):
+  """Error while processing a WorkItem."""
+
+
+class RetryException(Error):
+  """A non-fatal exception that indicates that a work item should be retried."""
+
+
+def InterruptibleSleep(sleep_time):
+  """Puts thread to sleep, checking this thread's exit_flag four times a second.
+
+  Args:
+    sleep_time: Time to sleep.
+  """
+  slept = 0.0
+  epsilon = .0001
+  thread = threading.currentThread()
+  while slept < sleep_time - epsilon:
+    remaining = sleep_time - slept
+    this_sleep_time = min(remaining, 0.25)
+    time.sleep(this_sleep_time)
+    slept += this_sleep_time
+    if thread.exit_flag:
+      return
+
+
+class WorkerThread(threading.Thread):
+  """A WorkerThread to execute WorkItems.
+
+  Attributes:
+    exit_flag: A boolean indicating whether this thread should stop
+      its work and exit.
+  """
+
+  def __init__(self, thread_pool, thread_gate, name=None):
+    """Initialize a WorkerThread instance.
+
+    Args:
+      thread_pool: An AdaptiveThreadPool instance.
+      thread_gate: A ThreadGate instance.
+      name: A name for this WorkerThread.
+    """
+    threading.Thread.__init__(self)
+
+    self.setDaemon(True)
+
+    self.exit_flag = False
+    self.__error = None
+    self.__traceback = None
+    self.__thread_pool = thread_pool
+    self.__work_queue = thread_pool.requeue
+    self.__thread_gate = thread_gate
+    if not name:
+      self.__name = 'Anonymous_' + self.__class__.__name__
+    else:
+      self.__name = name
+
+  def run(self):
+    """Perform the work of the thread."""
+    logger.debug('[%s] %s: started', self.getName(), self.__class__.__name__)
+
+    try:
+      self.WorkOnItems()
+    except:
+      self.SetError()
+
+    logger.debug('[%s] %s: exiting', self.getName(), self.__class__.__name__)
+
+  def SetError(self):
+    """Sets the error and traceback information for this thread.
+
+    This must be called from an exception handler.
+    """
+    if not self.__error:
+      exc_info = sys.exc_info()
+      self.__error = exc_info[1]
+      self.__traceback = exc_info[2]
+      logger.exception('[%s] %s:', self.getName(), self.__class__.__name__)
+
+  def WorkOnItems(self):
+    """Perform the work of a WorkerThread."""
+    while not self.exit_flag:
+      item = None
+      self.__thread_gate.StartWork()
+      try:
+        status, instruction = WorkItem.FAILURE, ThreadGate.DECREASE
+        try:
+          if self.exit_flag:
+            instruction = ThreadGate.HOLD
+            break
+
+          try:
+            item = self.__work_queue.get(block=True, timeout=1.0)
+          except Queue.Empty:
+            instruction = ThreadGate.HOLD
+            continue
+          if item == _THREAD_SHOULD_EXIT or self.exit_flag:
+            status, instruction = WorkItem.SUCCESS, ThreadGate.HOLD
+            break
+
+          logger.debug('[%s] Got work item %s', self.getName(), item)
+
+          status, instruction = item.PerformWork(self.__thread_pool)
+        except RetryException:
+          status, instruction = WorkItem.RETRY, ThreadGate.HOLD
+        except:
+          self.SetError()
+          raise
+
+      finally:
+        try:
+          if item:
+            if status == WorkItem.SUCCESS:
+              self.__work_queue.task_done()
+            elif status == WorkItem.RETRY:
+              try:
+                self.__work_queue.reput(item, block=False)
+              except Queue.Full:
+                logger.error('[%s] Failed to reput work item.', self.getName())
+                raise Error('Failed to reput work item')
+            else:
+              if not self.__error:
+                if item.error:
+                  self.__error = item.error
+                  self.__traceback = item.traceback
+                else:
+                  self.__error = WorkItemError(
+                      'Fatal error while processing %s' % item)
+                raise self.__error
+
+        finally:
+          self.__thread_gate.FinishWork(instruction=instruction)
+
+  def CheckError(self):
+    """If an error is present, then log it."""
+    if self.__error:
+      logger.error('Error in %s: %s', self.getName(), self.__error)
+      if self.__traceback:
+        logger.debug('%s', ''.join(traceback.format_exception(
+            self.__error.__class__,
+            self.__error,
+            self.__traceback)))
+
+  def __str__(self):
+    return self.__name
+
+
+class AdaptiveThreadPool(object):
+  """A thread pool which processes WorkItems from a queue.
+
+  Attributes:
+    requeue: The requeue instance which holds work items for this
+      thread pool.
+  """
+
+  def __init__(self,
+               num_threads,
+               queue_size=None,
+               base_thread_name=None,
+               worker_thread_factory=WorkerThread,
+               queue_factory=Queue.Queue):
+    """Initialize an AdaptiveThreadPool.
+
+    An adaptive thread pool executes WorkItems using a number of
+    WorkerThreads.  WorkItems represent items of work that may
+    succeed, soft fail, or hard fail. In addition, a completed work
+    item can signal this AdaptiveThreadPool to enable more or fewer
+    threads.  Initially one thread is active.  Soft failures are
+    requeued to be retried.  Hard failures cause this
+    AdaptiveThreadPool to shut down entirely.  See the WorkItem class
+    for more details.
+
+    Args:
+      num_threads: The number of threads to use.
+      queue_size: The size of the work item queue to use.
+      base_thread_name: A string from which worker thread names are derived.
+      worker_thread_factory: A factory which produces WorkerThreads.
+      queue_factory: Used for dependency injection.
+    """
+    if queue_size is None:
+      queue_size = num_threads
+    self.requeue = ReQueue(queue_size, queue_factory=queue_factory)
+    self.__thread_gate = ThreadGate(num_threads)
+    self.__num_threads = num_threads
+    self.__threads = []
+    for i in xrange(num_threads):
+      thread = worker_thread_factory(self, self.__thread_gate)
+      if base_thread_name:
+        base = base_thread_name
+      else:
+        base = thread.__class__.__name__
+      thread.name = '%s-%d' % (base, i)
+      self.__threads.append(thread)
+      thread.start()
+
+  def num_threads(self):
+    """Return the number of threads in this thread pool."""
+    return self.__num_threads
+
+  def Threads(self):
+    """Yields the registered threads."""
+    for thread in self.__threads:
+      yield thread
+
+  def SubmitItem(self, item, block=True, timeout=0.0):
+    """Submit a WorkItem to the AdaptiveThreadPool.
+
+    Args:
+      item: A WorkItem instance.
+      block: Whether to block on submitting if the submit queue is full.
+      timeout: Time to wait for room in the queue if block is True, 0.0 to
+        block indefinitely.
+
+    Raises:
+      Queue.Full if the submit queue is full.
+    """
+    self.requeue.put(item, block=block, timeout=timeout)
+
+  def QueuedItemCount(self):
+    """Returns the number of items currently in the queue."""
+    return self.requeue.qsize()
+
+  def Shutdown(self):
+    """Shutdown the thread pool.
+
+    Tasks may remain unexecuted in the submit queue.
+    """
+    while not self.requeue.empty():
+      try:
+        unused_item = self.requeue.get_nowait()
+        self.requeue.task_done()
+      except Queue.Empty:
+        pass
+    for thread in self.__threads:
+      thread.exit_flag = True
+      self.requeue.put(_THREAD_SHOULD_EXIT)
+    self.__thread_gate.EnableAllThreads()
+
+  def Wait(self):
+    """Wait until all work items have been completed."""
+    self.requeue.join()
+
+  def JoinThreads(self):
+    """Wait for all threads to exit."""
+    for thread in self.__threads:
+      logger.debug('Waiting for %s to exit' % str(thread))
+      thread.join()
+
+  def CheckErrors(self):
+    """Output logs for any errors that occurred in the worker threads."""
+    for thread in self.__threads:
+      thread.CheckError()
+
+
+class ThreadGate(object):
+  """Manage the number of active worker threads.
+
+  The ThreadGate limits the number of threads that are simultaneously
+  active in order to implement adaptive rate control.
+
+  Initially the ThreadGate allows only one thread to be active.  For
+  each successful work item, another thread is activated and for each
+  failed item, the number of active threads is reduced by one.  When only
+  one thread is active, failures will cause exponential backoff.
+
+  For example, a ThreadGate instance, thread_gate can be used in a number
+  of threads as so:
+
+  # Block until this thread is enabled for work.
+  thread_gate.StartWork()
+  try:
+    status = DoSomeWorkInvolvingLimitedSharedResources()
+    succeeded = IsStatusGood(status)
+    badly_failed = IsStatusVeryBad(status)
+  finally:
+    if succeeded:
+      # Succeeded, add more simultaneously enabled threads to the task.
+      thread_gate.FinishWork(instruction=ThreadGate.INCREASE)
+    elif badly_failed:
+      # Failed, or succeeded but with high resource load, reduce number of
+      # workers.
+      thread_gate.FinishWork(instruction=ThreadGate.DECREASE)
+    else:
+      # We succeeded, but don't want to add more workers to the task.
+      thread_gate.FinishWork(instruction=ThreadGate.HOLD)
+
+  the thread_gate will enable and disable/backoff threads in response to
+  resource load conditions.
+
+  StartWork can block indefinitely. FinishWork, while not
+  lock-free, should never block absent a demonic scheduler.
+  """
+
+  INCREASE = 'increase'
+  HOLD = 'hold'
+  DECREASE = 'decrease'
+
+  def __init__(self,
+               num_threads,
+               sleep=InterruptibleSleep):
+    """Constructor for ThreadGate instances.
+
+    Args:
+      num_threads: The total number of threads using this gate.
+      sleep: Used for dependency injection.
+    """
+    self.__enabled_count = 1
+    self.__lock = threading.Lock()
+    self.__thread_semaphore = threading.Semaphore(self.__enabled_count)
+    self.__num_threads = num_threads
+    self.__backoff_time = 0
+    self.__sleep = sleep
+
+  def num_threads(self):
+    return self.__num_threads
+
+  def EnableThread(self):
+    """Enable one more worker thread."""
+    self.__lock.acquire()
+    try:
+      self.__enabled_count += 1
+    finally:
+      self.__lock.release()
+    self.__thread_semaphore.release()
+
+  def EnableAllThreads(self):
+    """Enable all worker threads."""
+    for unused_idx in xrange(self.__num_threads - self.__enabled_count):
+      self.EnableThread()
+
+  def StartWork(self):
+    """Starts a critical section in which the number of workers is limited.
+
+    Starts a critical section which allows self.__enabled_count
+    simultaneously operating threads. The critical section is ended by
+    calling self.FinishWork().
+    """
+    self.__thread_semaphore.acquire()
+    if self.__backoff_time > 0.0:
+      if not threading.currentThread().exit_flag:
+        logger.info('Backing off due to errors: %.1f seconds',
+                    self.__backoff_time)
+        self.__sleep(self.__backoff_time)
+
+  def FinishWork(self, instruction=None):
+    """Ends a critical section started with self.StartWork()."""
+    if not instruction or instruction == ThreadGate.HOLD:
+      self.__thread_semaphore.release()
+
+    elif instruction == ThreadGate.INCREASE:
+      if self.__backoff_time > 0.0:
+        logger.info('Resetting backoff to 0.0')
+        self.__backoff_time = 0.0
+      do_enable = False
+      self.__lock.acquire()
+      try:
+        if self.__num_threads > self.__enabled_count:
+          do_enable = True
+          self.__enabled_count += 1
+      finally:
+        self.__lock.release()
+      if do_enable:
+        logger.debug('Increasing active thread count to %d',
+                     self.__enabled_count)
+        self.__thread_semaphore.release()
+      self.__thread_semaphore.release()
+
+    elif instruction == ThreadGate.DECREASE:
+      do_disable = False
+      self.__lock.acquire()
+      try:
+        if self.__enabled_count > 1:
+          do_disable = True
+          self.__enabled_count -= 1
+        else:
+          if self.__backoff_time == 0.0:
+            self.__backoff_time = INITIAL_BACKOFF
+          else:
+            self.__backoff_time *= BACKOFF_FACTOR
+      finally:
+        self.__lock.release()
+        if do_disable:
+          logger.debug('Decreasing the number of active threads to %d',
+                       self.__enabled_count)
+        else:
+          self.__thread_semaphore.release()
+
+
+class WorkItem(object):
+  """Holds a unit of work."""
+
+  SUCCESS = 'success'
+  RETRY = 'retry'
+  FAILURE = 'failure'
+
+  def __init__(self, name):
+    self.__name = name
+
+  def PerformWork(self, thread_pool):
+    """Perform the work of this work item and report the results.
+
+    Args:
+      thread_pool: The AdaptiveThreadPool instance associated with this
+        thread.
+
+    Returns:
+      A tuple (status, instruction) of the work status and an instruction
+      for the ThreadGate.
+    """
+    raise NotImplementedError
+
+  def __str__(self):
+    return self.__name
--- a/thirdparty/google_appengine/google/appengine/tools/appcfg.py	Mon Sep 07 20:26:39 2009 +0200
+++ b/thirdparty/google_appengine/google/appengine/tools/appcfg.py	Mon Sep 07 20:27:37 2009 +0200
@@ -36,11 +36,13 @@
 import mimetypes
 import optparse
 import os
+import random
 import re
 import sha
 import sys
 import tempfile
 import time
+import urllib
 import urllib2
 
 import google
@@ -69,6 +71,11 @@
 
 MAX_LOG_LEVEL = 4
 
+MAX_BATCH_SIZE = 1000000
+MAX_BATCH_COUNT = 100
+MAX_BATCH_FILE_SIZE = 200000
+BATCH_OVERHEAD = 500
+
 verbosity = 1
 
 
@@ -220,6 +227,25 @@
   return max_tries > 0
 
 
+def _VersionList(release):
+  """Parse a version string into a list of ints.
+
+  Args:
+    release: The 'release' version, e.g. '1.2.4'.
+        (Due to YAML parsing this may also be an int or float.)
+
+  Returns:
+    A list of ints corresponding to the parts of the version string
+    between periods.  Example:
+      '1.2.4' -> [1, 2, 4]
+      '1.2.3.4' -> [1, 2, 3, 4]
+
+  Raises:
+    ValueError if not all the parts are valid integers.
+  """
+  return [int(part) for part in str(release).split('.')]
+
+
 class UpdateCheck(object):
   """Determines if the local SDK is the latest version.
 
@@ -332,10 +358,26 @@
       return
 
     latest = yaml.safe_load(response)
-    if latest['release'] == version['release']:
+    if version['release'] == latest['release']:
       logging.info('The SDK is up to date.')
       return
 
+    try:
+      this_release = _VersionList(version['release'])
+    except ValueError:
+      logging.warn('Could not parse this release version (%r)',
+                   version['release'])
+    else:
+      try:
+        advertised_release = _VersionList(latest['release'])
+      except ValueError:
+        logging.warn('Could not parse advertised release version (%r)',
+                     latest['release'])
+      else:
+        if this_release > advertised_release:
+          logging.info('This SDK release is newer than the advertised release.')
+          return
+
     api_versions = latest['api_versions']
     if self.config.api_version not in api_versions:
       self._Nag(
@@ -964,6 +1006,149 @@
     fp.close()
 
 
+class UploadBatcher(object):
+  """Helper to batch file uploads."""
+
+  def __init__(self, what, app_id, version, server):
+    """Constructor.
+
+    Args:
+      what: Either 'file' or 'blob' indicating what kind of objects
+        this batcher uploads.  Used in messages and URLs.
+      app_id: The application ID.
+      version: The application version string.
+      server: The RPC server.
+    """
+    assert what in ('file', 'blob'), repr(what)
+    self.what = what
+    self.app_id = app_id
+    self.version = version
+    self.server = server
+    self.single_url = '/api/appversion/add' + what
+    self.batch_url = self.single_url + 's'
+    self.batching = True
+    self.batch = []
+    self.batch_size = 0
+
+  def SendBatch(self):
+    """Send the current batch on its way.
+
+    If successful, resets self.batch and self.batch_size.
+
+    Raises:
+      HTTPError with code=404 if the server doesn't support batching.
+    """
+    boundary = 'boundary'
+    parts = []
+    for path, payload, mime_type in self.batch:
+      while boundary in payload:
+        boundary += '%04x' % random.randint(0, 0xffff)
+        assert len(boundary) < 80, 'Unexpected error, please try again.'
+      part = '\n'.join(['',
+                        'X-Appcfg-File: %s' % urllib.quote(path),
+                        'X-Appcfg-Hash: %s' % _Hash(payload),
+                        'Content-Type: %s' % mime_type,
+                        'Content-Length: %d' % len(payload),
+                        'Content-Transfer-Encoding: 8bit',
+                        '',
+                        payload,
+                        ])
+      parts.append(part)
+    parts.insert(0,
+                 'MIME-Version: 1.0\n'
+                 'Content-Type: multipart/mixed; boundary="%s"\n'
+                 '\n'
+                 'This is a message with multiple parts in MIME format.' %
+                 boundary)
+    parts.append('--\n')
+    delimiter = '\n--%s' % boundary
+    payload = delimiter.join(parts)
+    logging.info('Uploading batch of %d %ss to %s with boundary="%s".',
+                 len(self.batch), self.what, self.batch_url, boundary)
+    self.server.Send(self.batch_url,
+                     payload=payload,
+                     content_type='message/rfc822',
+                     app_id=self.app_id,
+                     version=self.version)
+    self.batch = []
+    self.batch_size = 0
+
+  def SendSingleFile(self, path, payload, mime_type):
+    """Send a single file on its way."""
+    logging.info('Uploading %s %s (%s bytes, type=%s) to %s.',
+                 self.what, path, len(payload), mime_type, self.single_url)
+    self.server.Send(self.single_url,
+                     payload=payload,
+                     content_type=mime_type,
+                     path=path,
+                     app_id=self.app_id,
+                     version=self.version)
+
+  def Flush(self):
+    """Flush the current batch.
+
+    This first attempts to send the batch as a single request; if that
+    fails because the server doesn't support batching, the files are
+    sent one by one, and self.batching is reset to False.
+
+    At the end, self.batch and self.batch_size are reset.
+    """
+    if not self.batch:
+      return
+    try:
+      self.SendBatch()
+    except urllib2.HTTPError, err:
+      if err.code != 404:
+        raise
+
+      logging.info('Old server detected; turning off %s batching.', self.what)
+      self.batching = False
+
+      for path, payload, mime_type in self.batch:
+        self.SendSingleFile(path, payload, mime_type)
+
+      self.batch = []
+      self.batch_size = 0
+
+  def AddToBatch(self, path, payload, mime_type):
+    """Batch a file, possibly flushing first, or perhaps upload it directly.
+
+    Args:
+      path: The name of the file.
+      payload: The contents of the file.
+      mime_type: The MIME Content-type of the file, or None.
+
+    If mime_type is None, application/octet-stream is substituted.
+    """
+    if not mime_type:
+      mime_type = 'application/octet-stream'
+    size = len(payload)
+    if size <= MAX_BATCH_FILE_SIZE:
+      if (len(self.batch) >= MAX_BATCH_COUNT or
+          self.batch_size + size > MAX_BATCH_SIZE):
+        self.Flush()
+      if self.batching:
+        logging.info('Adding %s %s (%s bytes, type=%s) to batch.',
+                     self.what, path, size, mime_type)
+        self.batch.append((path, payload, mime_type))
+        self.batch_size += size + BATCH_OVERHEAD
+        return
+    self.SendSingleFile(path, payload, mime_type)
+
+
+def _Hash(content):
+  """Compute the hash of the content.
+
+  Args:
+    content: The data to hash as a string.
+
+  Returns:
+    The string representation of the hash.
+  """
+  h = sha.new(content).hexdigest()
+  return '%s_%s_%s_%s_%s' % (h[0:8], h[8:16], h[16:24], h[24:32], h[32:40])
+
+
 class AppVersionUpload(object):
   """Provides facilities to upload a new appversion to the hosting service.
 
@@ -995,18 +1180,11 @@
     self.files = {}
     self.in_transaction = False
     self.deployed = False
-
-  def _Hash(self, content):
-    """Compute the hash of the content.
-
-    Args:
-      content: The data to hash as a string.
-
-    Returns:
-      The string representation of the hash.
-    """
-    h = sha.new(content).hexdigest()
-    return '%s_%s_%s_%s_%s' % (h[0:8], h[8:16], h[16:24], h[24:32], h[32:40])
+    self.batching = True
+    self.file_batcher = UploadBatcher('file', self.app_id, self.version,
+                                      self.server)
+    self.blob_batcher = UploadBatcher('blob', self.app_id, self.version,
+                                      self.server)
 
   def AddFile(self, path, file_handle):
     """Adds the provided file to the list to be pushed to the server.
@@ -1024,7 +1202,7 @@
       return
 
     pos = file_handle.tell()
-    content_hash = self._Hash(file_handle.read())
+    content_hash = _Hash(file_handle.read())
     file_handle.seek(pos, 0)
 
     self.files[path] = content_hash
@@ -1084,7 +1262,7 @@
     CloneFiles('/api/appversion/cloneblobs', blobs_to_clone, 'static')
     CloneFiles('/api/appversion/clonefiles', files_to_clone, 'application')
 
-    logging.info('Files to upload: ' + str(files_to_upload))
+    logging.debug('Files to upload: %s', files_to_upload)
 
     self.files = files_to_upload
     return sorted(files_to_upload.iterkeys())
@@ -1109,14 +1287,11 @@
 
     del self.files[path]
     mime_type = GetMimeTypeIfStaticFile(self.config, path)
-    if mime_type is not None:
-      self.server.Send('/api/appversion/addblob', app_id=self.app_id,
-                       version=self.version, path=path, content_type=mime_type,
-                       payload=file_handle.read())
+    payload = file_handle.read()
+    if mime_type is None:
+      self.file_batcher.AddToBatch(path, payload, mime_type)
     else:
-      self.server.Send('/api/appversion/addfile', app_id=self.app_id,
-                       version=self.version, path=path,
-                       payload=file_handle.read())
+      self.blob_batcher.AddToBatch(path, payload, mime_type)
 
   def Commit(self):
     """Commits the transaction, making the new app version available.
@@ -1249,10 +1424,9 @@
     try:
       missing_files = self.Begin()
       if missing_files:
-        StatusUpdate('Uploading %d files.' % len(missing_files))
+        StatusUpdate('Uploading %d files and blobs.' % len(missing_files))
         num_files = 0
         for missing_file in missing_files:
-          logging.info('Uploading file \'%s\'' % missing_file)
           file_handle = openfunc(missing_file)
           try:
             self.UploadFile(missing_file, file_handle)
@@ -1260,7 +1434,11 @@
             file_handle.close()
           num_files += 1
           if num_files % 500 == 0:
-            StatusUpdate('Uploaded %d files.' % num_files)
+            StatusUpdate('Processed %d out of %s.' %
+                         (num_files, len(missing_files)))
+        self.file_batcher.Flush()
+        self.blob_batcher.Flush()
+        StatusUpdate('Uploaded %d files and blobs' % num_files)
 
       self.Commit()
 
@@ -1268,6 +1446,10 @@
       logging.info('User interrupted. Aborting.')
       self.Rollback()
       raise
+    except urllib2.HTTPError, err:
+      logging.info('HTTP Error (%s)', err)
+      self.Rollback()
+      raise
     except:
       logging.exception('An unexpected error occurred. Aborting.')
       self.Rollback()
@@ -1856,6 +2038,12 @@
 
     if self.options.num_days is None:
       self.options.num_days = int(not self.options.append)
+
+    try:
+      end_date = self._ParseEndDate(self.options.end_date)
+    except ValueError:
+      self.parser.error('End date must be in the format YYYY-MM-DD.')
+
     basepath = self.args[0]
     appyaml = self._ParseAppYaml(basepath)
     rpc_server = self._GetRpcServer()
@@ -1863,11 +2051,27 @@
                                    self.options.num_days,
                                    self.options.append,
                                    self.options.severity,
-                                   time.time(),
+                                   end_date,
                                    self.options.vhost,
                                    self.options.include_vhost)
     logs_requester.DownloadLogs()
 
+  def _ParseEndDate(self, date, time_func=time.time):
+    """Translates a user-readable end date to a POSIX timestamp.
+
+    Args:
+      date: A utc date string as YYYY-MM-DD.
+      time_func: time.time() function for testing.
+
+    Returns:
+      A POSIX timestamp representing the last moment of that day.
+      If no date is given, returns a timestamp representing now.
+    """
+    if not date:
+      return time_func()
+    struct_time = time.strptime('%s' % date, '%Y-%m-%d')
+    return calendar.timegm(struct_time) + 86400
+
   def _RequestLogsOptions(self, parser):
     """Adds request_logs-specific options to 'parser'.
 
@@ -1896,6 +2100,10 @@
     parser.add_option('--include_vhost', dest='include_vhost',
                       action='store_true', default=False,
                       help='Include virtual host in log messages.')
+    parser.add_option('--end_date', dest='end_date',
+                      action='store', default='',
+                      help='End date (as YYYY-MM-DD) of period for log data. '
+                      'Defaults to today.')
 
   def CronInfo(self, now=None, output=sys.stdout):
     """Displays information about cron definitions.
@@ -2032,7 +2240,12 @@
                      'email',
                      'debug',
                      'exporter_opts',
+                     'mapper_opts',
                      'result_db_filename',
+                     'mapper_opts',
+                     'dry_run',
+                     'dump',
+                     'restore',
                      )])
 
   def PerformDownload(self, run_fn=None):
@@ -2050,6 +2263,9 @@
     args = self._MakeLoaderArgs()
     args['download'] = True
     args['has_header'] = False
+    args['map'] = False
+    args['dump'] = False
+    args['restore'] = False
 
     run_fn(args)
 
@@ -2067,6 +2283,9 @@
 
     args = self._MakeLoaderArgs()
     args['download'] = False
+    args['map'] = False
+    args['dump'] = False
+    args['restore'] = False
 
     run_fn(args)
 
@@ -2114,6 +2333,9 @@
                       help='File to write bulkloader logs.  If not supplied '
                       'then a new log file will be created, named: '
                       'bulkloader-log-TIMESTAMP.')
+    parser.add_option('--dry_run', action='store_true',
+                      dest='dry_run', default=False,
+                      help='Do not execute any remote_api calls')
 
   def _PerformUploadOptions(self, parser):
     """Adds 'upload_data' specific options to the 'parser' passed in.
--- a/thirdparty/google_appengine/google/appengine/tools/bulkloader.py	Mon Sep 07 20:26:39 2009 +0200
+++ b/thirdparty/google_appengine/google/appengine/tools/bulkloader.py	Mon Sep 07 20:27:37 2009 +0200
@@ -14,7 +14,6 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 #
-
 """Imports data over HTTP.
 
 Usage:
@@ -33,7 +32,7 @@
                             the URL endpoint. The more data per row/Entity, the
                             smaller the batch size should be. (Default 10)
     --config_file=<path>    File containing Model and Loader definitions.
-                            (Required)
+                            (Required unless --dump or --restore are used)
     --db_filename=<path>    Specific progress database to write to, or to
                             resume from. If not supplied, then a new database
                             will be started, named:
@@ -41,6 +40,8 @@
                             The special filename "skip" may be used to simply
                             skip reading/writing any progress information.
     --download              Export entities to a file.
+    --dry_run               Do not execute any remote_api calls.
+    --dump                  Use zero-configuration dump format.
     --email=<string>        The username to use. Will prompt if omitted.
     --exporter_opts=<string>
                             A string to pass to the Exporter.initialize method.
@@ -54,9 +55,12 @@
     --log_file=<path>       File to write bulkloader logs.  If not supplied
                             then a new log file will be created, named:
                             bulkloader-log-TIMESTAMP.
+    --map                   Map an action across datastore entities.
+    --mapper_opts=<string>  A string to pass to the Mapper.Initialize method.
     --num_threads=<int>     Number of threads to use for uploading entities
                             (Default 10)
     --passin                Read the login password from stdin.
+    --restore               Restore from zero-configuration dump format.
     --result_db_filename=<path>
                             Result database to write to for downloads.
     --rps_limit=<int>       The maximum number of records per second to
@@ -78,7 +82,6 @@
 
 
 
-import cPickle
 import csv
 import errno
 import getopt
@@ -88,20 +91,31 @@
 import os
 import Queue
 import re
+import shutil
 import signal
 import StringIO
 import sys
 import threading
 import time
+import traceback
 import urllib2
 import urlparse
 
+from google.appengine.datastore import entity_pb
+
+from google.appengine.api import apiproxy_stub_map
+from google.appengine.api import datastore
 from google.appengine.api import datastore_errors
+from google.appengine.datastore import datastore_pb
 from google.appengine.ext import db
+from google.appengine.ext import key_range as key_range_module
 from google.appengine.ext.db import polymodel
 from google.appengine.ext.remote_api import remote_api_stub
+from google.appengine.ext.remote_api import throttle as remote_api_throttle
 from google.appengine.runtime import apiproxy_errors
+from google.appengine.tools import adaptive_thread_pool
 from google.appengine.tools import appengine_rpc
+from google.appengine.tools.requeue import ReQueue
 
 try:
   import sqlite3
@@ -110,10 +124,14 @@
 
 logger = logging.getLogger('google.appengine.tools.bulkloader')
 
+KeyRange = key_range_module.KeyRange
+
 DEFAULT_THREAD_COUNT = 10
 
 DEFAULT_BATCH_SIZE = 10
 
+DEFAULT_DOWNLOAD_BATCH_SIZE = 100
+
 DEFAULT_QUEUE_SIZE = DEFAULT_THREAD_COUNT * 10
 
 _THREAD_SHOULD_EXIT = '_THREAD_SHOULD_EXIT'
@@ -125,9 +143,7 @@
 
 STATE_GETTING = 1
 STATE_GOT = 2
-STATE_NOT_GOT = 3
-
-MINIMUM_THROTTLE_SLEEP_DURATION = 0.001
+STATE_ERROR = 3
 
 DATA_CONSUMED_TO_HERE = 'DATA_CONSUMED_TO_HERE'
 
@@ -142,16 +158,8 @@
 
 DEFAULT_REQUEST_LIMIT = 8
 
-BANDWIDTH_UP = 'http-bandwidth-up'
-BANDWIDTH_DOWN = 'http-bandwidth-down'
-REQUESTS = 'http-requests'
-HTTPS_BANDWIDTH_UP = 'https-bandwidth-up'
-HTTPS_BANDWIDTH_DOWN = 'https-bandwidth-down'
-HTTPS_REQUESTS = 'https-requests'
-RECORDS = 'records'
-
-MAXIMUM_INCREASE_DURATION = 8.0
-MAXIMUM_HOLD_DURATION = 10.0
+MAXIMUM_INCREASE_DURATION = 5.0
+MAXIMUM_HOLD_DURATION = 12.0
 
 
 def ImportStateMessage(state):
@@ -170,7 +178,17 @@
       STATE_READ: 'Batch read from file.',
       STATE_GETTING: 'Fetching batch from server',
       STATE_GOT: 'Batch successfully fetched.',
-      STATE_NOT_GOT: 'Error while fetching batch'
+      STATE_ERROR: 'Error while fetching batch'
+  }[state])
+
+
+def MapStateMessage(state):
+  """Converts a numeric state identifier to a status message."""
+  return ({
+      STATE_READ: 'Batch read from file.',
+      STATE_GETTING: 'Querying for batch from server',
+      STATE_GOT: 'Batch successfully fetched.',
+      STATE_ERROR: 'Error while fetching or mapping.'
   }[state])
 
 
@@ -180,7 +198,7 @@
       STATE_READ: 'READ',
       STATE_GETTING: 'GETTING',
       STATE_GOT: 'GOT',
-      STATE_NOT_GOT: 'NOT_GOT'
+      STATE_ERROR: 'NOT_GOT'
   }[state])
 
 
@@ -190,7 +208,7 @@
       STATE_READ: 'READ',
       STATE_GETTING: 'SENDING',
       STATE_GOT: 'SENT',
-      STATE_NOT_GOT: 'NOT_SENT'
+      STATE_NOT_SENT: 'NOT_SENT'
   }[state])
 
 
@@ -234,16 +252,35 @@
   """A filename passed in by the user refers to a non-writable output file."""
 
 
-class KeyRangeError(Error):
-  """Error while trying to generate a KeyRange."""
-
-
 class BadStateError(Error):
   """A work item in an unexpected state was encountered."""
 
 
+class KeyRangeError(Error):
+  """An error during construction of a KeyRangeItem."""
+
+
+class FieldSizeLimitError(Error):
+  """The csv module tried to read a field larger than the size limit."""
+
+  def __init__(self, limit):
+    self.message = """
+A field in your CSV input file has exceeded the current limit of %d.
+
+You can raise this limit by adding the following lines to your config file:
+
+import csv
+csv.field_size_limit(new_limit)
+
+where new_limit is number larger than the size in bytes of the largest
+field in your CSV.
+""" % limit
+    Error.__init__(self, self.message)
+
+
 class NameClashError(Error):
   """A name clash occurred while trying to alias old method names."""
+
   def __init__(self, old_name, new_name, klass):
     Error.__init__(self, old_name, new_name, klass)
     self.old_name = old_name
@@ -253,48 +290,51 @@
 
 def GetCSVGeneratorFactory(kind, csv_filename, batch_size, csv_has_header,
                            openfile=open, create_csv_reader=csv.reader):
-  """Return a factory that creates a CSV-based WorkItem generator.
+  """Return a factory that creates a CSV-based UploadWorkItem generator.
 
   Args:
     kind: The kind of the entities being uploaded.
     csv_filename: File on disk containing CSV data.
-    batch_size: Maximum number of CSV rows to stash into a WorkItem.
+    batch_size: Maximum number of CSV rows to stash into an UploadWorkItem.
     csv_has_header: Whether to skip the first row of the CSV.
     openfile: Used for dependency injection.
     create_csv_reader: Used for dependency injection.
 
   Returns:
     A callable (accepting the Progress Queue and Progress Generators
-    as input) which creates the WorkItem generator.
+    as input) which creates the UploadWorkItem generator.
   """
   loader = Loader.RegisteredLoader(kind)
   loader._Loader__openfile = openfile
   loader._Loader__create_csv_reader = create_csv_reader
   record_generator = loader.generate_records(csv_filename)
 
-  def CreateGenerator(progress_queue, progress_generator):
-    """Initialize a WorkItem generator linked to a progress generator and queue.
+  def CreateGenerator(request_manager, progress_queue, progress_generator):
+    """Initialize a UploadWorkItem generator.
 
     Args:
+      request_manager: A RequestManager instance.
       progress_queue: A ProgressQueue instance to send progress information.
       progress_generator: A generator of progress information or None.
 
     Returns:
-      A WorkItemGenerator instance.
+      An UploadWorkItemGenerator instance.
     """
-    return WorkItemGenerator(progress_queue,
-                             progress_generator,
-                             record_generator,
-                             csv_has_header,
-                             batch_size)
+    return UploadWorkItemGenerator(request_manager,
+                                   progress_queue,
+                                   progress_generator,
+                                   record_generator,
+                                   csv_has_header,
+                                   batch_size)
 
   return CreateGenerator
 
 
-class WorkItemGenerator(object):
-  """Reads rows from a row generator and generates WorkItems of batches."""
+class UploadWorkItemGenerator(object):
+  """Reads rows from a row generator and generates UploadWorkItems."""
 
   def __init__(self,
+               request_manager,
                progress_queue,
                progress_generator,
                record_generator,
@@ -303,12 +343,15 @@
     """Initialize a WorkItemGenerator.
 
     Args:
+      request_manager: A RequestManager instance with which to associate
+        WorkItems.
       progress_queue: A progress queue with which to associate WorkItems.
       progress_generator: A generator of progress information.
       record_generator: A generator of data records.
       skip_first: Whether to skip the first data record.
       batch_size: The number of data records per WorkItem.
     """
+    self.request_manager = request_manager
     self.progress_queue = progress_queue
     self.progress_generator = progress_generator
     self.reader = record_generator
@@ -360,30 +403,29 @@
       self.line_number += 1
 
   def _MakeItem(self, key_start, key_end, rows, progress_key=None):
-    """Makes a WorkItem containing the given rows, with the given keys.
+    """Makes a UploadWorkItem containing the given rows, with the given keys.
 
     Args:
-      key_start: The start key for the WorkItem.
-      key_end: The end key for the WorkItem.
-      rows: A list of the rows for the WorkItem.
-      progress_key: The progress key for the WorkItem
+      key_start: The start key for the UploadWorkItem.
+      key_end: The end key for the UploadWorkItem.
+      rows: A list of the rows for the UploadWorkItem.
+      progress_key: The progress key for the UploadWorkItem
 
     Returns:
-      A WorkItem instance for the given batch.
+      An UploadWorkItem instance for the given batch.
     """
     assert rows
 
-    item = WorkItem(self.progress_queue, rows,
-                    key_start, key_end,
-                    progress_key=progress_key)
+    item = UploadWorkItem(self.request_manager, self.progress_queue, rows,
+                          key_start, key_end, progress_key=progress_key)
 
     return item
 
   def Batches(self):
-    """Reads from the record_generator and generates WorkItems.
+    """Reads from the record_generator and generates UploadWorkItems.
 
     Yields:
-      Instances of class WorkItem
+      Instances of class UploadWorkItem
 
     Raises:
       ResumeError: If the progress database and data file indicate a different
@@ -468,37 +510,50 @@
     """
     csv_file = self.openfile(self.csv_filename, 'rb')
     reader = self.create_csv_reader(csv_file, skipinitialspace=True)
-    return reader
-
-
-class KeyRangeGenerator(object):
+    try:
+      for record in reader:
+        yield record
+    except csv.Error, e:
+      if e.args and e.args[0].startswith('field larger than field limit'):
+        limit = e.args[1]
+        raise FieldSizeLimitError(limit)
+      else:
+        raise
+
+
+class KeyRangeItemGenerator(object):
   """Generates ranges of keys to download.
 
   Reads progress information from the progress database and creates
-  KeyRange objects corresponding to incompletely downloaded parts of an
+  KeyRangeItem objects corresponding to incompletely downloaded parts of an
   export.
   """
 
-  def __init__(self, kind, progress_queue, progress_generator):
-    """Initialize the KeyRangeGenerator.
+  def __init__(self, request_manager, kind, progress_queue, progress_generator,
+               key_range_item_factory):
+    """Initialize the KeyRangeItemGenerator.
 
     Args:
+      request_manager: A RequestManager instance.
       kind: The kind of entities being transferred.
       progress_queue: A queue used for tracking progress information.
       progress_generator: A generator of prior progress information, or None
         if there is no prior status.
+      key_range_item_factory: A factory to produce KeyRangeItems.
     """
+    self.request_manager = request_manager
     self.kind = kind
     self.row_count = 0
     self.xfer_count = 0
     self.progress_queue = progress_queue
     self.progress_generator = progress_generator
+    self.key_range_item_factory = key_range_item_factory
 
   def Batches(self):
     """Iterate through saved progress information.
 
     Yields:
-      KeyRange instances corresponding to undownloaded key ranges.
+      KeyRangeItem instances corresponding to undownloaded key ranges.
     """
     if self.progress_generator is not None:
       for progress_key, state, key_start, key_end in self.progress_generator:
@@ -506,397 +561,27 @@
           key_start = ParseKey(key_start)
           key_end = ParseKey(key_end)
 
-          result = KeyRange(self.progress_queue,
-                            self.kind,
-                            key_start=key_start,
-                            key_end=key_end,
-                            progress_key=progress_key,
-                            direction=KeyRange.ASC,
-                            state=STATE_READ)
+          key_range = KeyRange(key_start=key_start,
+                               key_end=key_end)
+
+          result = self.key_range_item_factory(self.request_manager,
+                                               self.progress_queue,
+                                               self.kind,
+                                               key_range,
+                                               progress_key=progress_key,
+                                               state=STATE_READ)
           yield result
     else:
-
-      yield KeyRange(
-          self.progress_queue, self.kind,
-          key_start=None,
-          key_end=None,
-          direction=KeyRange.DESC)
-
-
-class ReQueue(object):
-  """A special thread-safe queue.
-
-  A ReQueue allows unfinished work items to be returned with a call to
-  reput().  When an item is reput, task_done() should *not* be called
-  in addition, getting an item that has been reput does not increase
-  the number of outstanding tasks.
-
-  This class shares an interface with Queue.Queue and provides the
-  additional reput method.
-  """
-
-  def __init__(self,
-               queue_capacity,
-               requeue_capacity=None,
-               queue_factory=Queue.Queue,
-               get_time=time.time):
-    """Initialize a ReQueue instance.
-
-    Args:
-      queue_capacity: The number of items that can be put in the ReQueue.
-      requeue_capacity: The numer of items that can be reput in the ReQueue.
-      queue_factory: Used for dependency injection.
-      get_time: Used for dependency injection.
-    """
-    if requeue_capacity is None:
-      requeue_capacity = queue_capacity
-
-    self.get_time = get_time
-    self.queue = queue_factory(queue_capacity)
-    self.requeue = queue_factory(requeue_capacity)
-    self.lock = threading.Lock()
-    self.put_cond = threading.Condition(self.lock)
-    self.get_cond = threading.Condition(self.lock)
-
-  def _DoWithTimeout(self,
-                     action,
-                     exc,
-                     wait_cond,
-                     done_cond,
-                     lock,
-                     timeout=None,
-                     block=True):
-    """Performs the given action with a timeout.
-
-    The action must be non-blocking, and raise an instance of exc on a
-    recoverable failure.  If the action fails with an instance of exc,
-    we wait on wait_cond before trying again.  Failure after the
-    timeout is reached is propagated as an exception.  Success is
-    signalled by notifying on done_cond and returning the result of
-    the action.  If action raises any exception besides an instance of
-    exc, it is immediately propagated.
-
-    Args:
-      action: A callable that performs a non-blocking action.
-      exc: An exception type that is thrown by the action to indicate
-        a recoverable error.
-      wait_cond: A condition variable which should be waited on when
-        action throws exc.
-      done_cond: A condition variable to signal if the action returns.
-      lock: The lock used by wait_cond and done_cond.
-      timeout: A non-negative float indicating the maximum time to wait.
-      block: Whether to block if the action cannot complete immediately.
-
-    Returns:
-      The result of the action, if it is successful.
-
-    Raises:
-      ValueError: If the timeout argument is negative.
-    """
-    if timeout is not None and timeout < 0.0:
-      raise ValueError('\'timeout\' must not be a negative  number')
-    if not block:
-      timeout = 0.0
-    result = None
-    success = False
-    start_time = self.get_time()
-    lock.acquire()
-    try:
-      while not success:
-        try:
-          result = action()
-          success = True
-        except Exception, e:
-          if not isinstance(e, exc):
-            raise e
-          if timeout is not None:
-            elapsed_time = self.get_time() - start_time
-            timeout -= elapsed_time
-            if timeout <= 0.0:
-              raise e
-          wait_cond.wait(timeout)
-    finally:
-      if success:
-        done_cond.notify()
-      lock.release()
-    return result
-
-  def put(self, item, block=True, timeout=None):
-    """Put an item into the requeue.
-
-    Args:
-      item: An item to add to the requeue.
-      block: Whether to block if the requeue is full.
-      timeout: Maximum on how long to wait until the queue is non-full.
-
-    Raises:
-      Queue.Full if the queue is full and the timeout expires.
-    """
-    def PutAction():
-      self.queue.put(item, block=False)
-    self._DoWithTimeout(PutAction,
-                        Queue.Full,
-                        self.get_cond,
-                        self.put_cond,
-                        self.lock,
-                        timeout=timeout,
-                        block=block)
-
-  def reput(self, item, block=True, timeout=None):
-    """Re-put an item back into the requeue.
-
-    Re-putting an item does not increase the number of outstanding
-    tasks, so the reput item should be uniquely associated with an
-    item that was previously removed from the requeue and for which
-    TaskDone has not been called.
-
-    Args:
-      item: An item to add to the requeue.
-      block: Whether to block if the requeue is full.
-      timeout: Maximum on how long to wait until the queue is non-full.
-
-    Raises:
-      Queue.Full is the queue is full and the timeout expires.
-    """
-    def ReputAction():
-      self.requeue.put(item, block=False)
-    self._DoWithTimeout(ReputAction,
-                        Queue.Full,
-                        self.get_cond,
-                        self.put_cond,
-                        self.lock,
-                        timeout=timeout,
-                        block=block)
-
-  def get(self, block=True, timeout=None):
-    """Get an item from the requeue.
-
-    Args:
-      block: Whether to block if the requeue is empty.
-      timeout: Maximum on how long to wait until the requeue is non-empty.
-
-    Returns:
-      An item from the requeue.
-
-    Raises:
-      Queue.Empty if the queue is empty and the timeout expires.
-    """
-    def GetAction():
-      try:
-        result = self.requeue.get(block=False)
-        self.requeue.task_done()
-      except Queue.Empty:
-        result = self.queue.get(block=False)
-      return result
-    return self._DoWithTimeout(GetAction,
-                               Queue.Empty,
-                               self.put_cond,
-                               self.get_cond,
-                               self.lock,
-                               timeout=timeout,
-                               block=block)
-
-  def join(self):
-    """Blocks until all of the items in the requeue have been processed."""
-    self.queue.join()
-
-  def task_done(self):
-    """Indicate that a previously enqueued item has been fully processed."""
-    self.queue.task_done()
-
-  def empty(self):
-    """Returns true if the requeue is empty."""
-    return self.queue.empty() and self.requeue.empty()
-
-  def get_nowait(self):
-    """Try to get an item from the queue without blocking."""
-    return self.get(block=False)
-
-  def qsize(self):
-    return self.queue.qsize() + self.requeue.qsize()
-
-
-class ThrottleHandler(urllib2.BaseHandler):
-  """A urllib2 handler for http and https requests that adds to a throttle."""
-
-  def __init__(self, throttle):
-    """Initialize a ThrottleHandler.
-
-    Args:
-      throttle: A Throttle instance to call for bandwidth and http/https request
-        throttling.
-    """
-    self.throttle = throttle
-
-  def AddRequest(self, throttle_name, req):
-    """Add to bandwidth throttle for given request.
-
-    Args:
-      throttle_name: The name of the bandwidth throttle to add to.
-      req: The request whose size will be added to the throttle.
-    """
-    size = 0
-    for key, value in req.headers.iteritems():
-      size += len('%s: %s\n' % (key, value))
-    for key, value in req.unredirected_hdrs.iteritems():
-      size += len('%s: %s\n' % (key, value))
-    (unused_scheme,
-     unused_host_port, url_path,
-     unused_query, unused_fragment) = urlparse.urlsplit(req.get_full_url())
-    size += len('%s %s HTTP/1.1\n' % (req.get_method(), url_path))
-    data = req.get_data()
-    if data:
-      size += len(data)
-    self.throttle.AddTransfer(throttle_name, size)
-
-  def AddResponse(self, throttle_name, res):
-    """Add to bandwidth throttle for given response.
-
-    Args:
-      throttle_name: The name of the bandwidth throttle to add to.
-      res: The response whose size will be added to the throttle.
-    """
-    content = res.read()
-    def ReturnContent():
-      return content
-    res.read = ReturnContent
-    size = len(content)
-    headers = res.info()
-    for key, value in headers.items():
-      size += len('%s: %s\n' % (key, value))
-    self.throttle.AddTransfer(throttle_name, size)
-
-  def http_request(self, req):
-    """Process an HTTP request.
-
-    If the throttle is over quota, sleep first.  Then add request size to
-    throttle before returning it to be sent.
-
-    Args:
-      req: A urllib2.Request object.
-
-    Returns:
-      The request passed in.
-    """
-    self.throttle.Sleep()
-    self.AddRequest(BANDWIDTH_UP, req)
-    return req
-
-  def https_request(self, req):
-    """Process an HTTPS request.
-
-    If the throttle is over quota, sleep first.  Then add request size to
-    throttle before returning it to be sent.
-
-    Args:
-      req: A urllib2.Request object.
-
-    Returns:
-      The request passed in.
-    """
-    self.throttle.Sleep()
-    self.AddRequest(HTTPS_BANDWIDTH_UP, req)
-    return req
-
-  def http_response(self, unused_req, res):
-    """Process an HTTP response.
-
-    The size of the response is added to the bandwidth throttle and the request
-    throttle is incremented by one.
-
-    Args:
-      unused_req: The urllib2 request for this response.
-      res: A urllib2 response object.
-
-    Returns:
-      The response passed in.
-    """
-    self.AddResponse(BANDWIDTH_DOWN, res)
-    self.throttle.AddTransfer(REQUESTS, 1)
-    return res
-
-  def https_response(self, unused_req, res):
-    """Process an HTTPS response.
-
-    The size of the response is added to the bandwidth throttle and the request
-    throttle is incremented by one.
-
-    Args:
-      unused_req: The urllib2 request for this response.
-      res: A urllib2 response object.
-
-    Returns:
-      The response passed in.
-    """
-    self.AddResponse(HTTPS_BANDWIDTH_DOWN, res)
-    self.throttle.AddTransfer(HTTPS_REQUESTS, 1)
-    return res
-
-
-class ThrottledHttpRpcServer(appengine_rpc.HttpRpcServer):
-  """Provides a simplified RPC-style interface for HTTP requests.
-
-  This RPC server uses a Throttle to prevent exceeding quotas.
-  """
-
-  def __init__(self, throttle, request_manager, *args, **kwargs):
-    """Initialize a ThrottledHttpRpcServer.
-
-    Also sets request_manager.rpc_server to the ThrottledHttpRpcServer instance.
-
-    Args:
-      throttle: A Throttles instance.
-      request_manager: A RequestManager instance.
-      args: Positional arguments to pass through to
-        appengine_rpc.HttpRpcServer.__init__
-      kwargs: Keyword arguments to pass through to
-        appengine_rpc.HttpRpcServer.__init__
-    """
-    self.throttle = throttle
-    appengine_rpc.HttpRpcServer.__init__(self, *args, **kwargs)
-    request_manager.rpc_server = self
-
-  def _GetOpener(self):
-    """Returns an OpenerDirector that supports cookies and ignores redirects.
-
-    Returns:
-      A urllib2.OpenerDirector object.
-    """
-    opener = appengine_rpc.HttpRpcServer._GetOpener(self)
-    opener.add_handler(ThrottleHandler(self.throttle))
-
-    return opener
-
-
-def ThrottledHttpRpcServerFactory(throttle, request_manager):
-  """Create a factory to produce ThrottledHttpRpcServer for a given throttle.
-
-  Args:
-    throttle: A Throttle instance to use for the ThrottledHttpRpcServer.
-    request_manager: A RequestManager instance.
-
-  Returns:
-    A factory to produce a ThrottledHttpRpcServer.
-  """
-
-  def MakeRpcServer(*args, **kwargs):
-    """Factory to produce a ThrottledHttpRpcServer.
-
-    Args:
-      args: Positional args to pass to ThrottledHttpRpcServer.
-      kwargs: Keyword args to pass to ThrottledHttpRpcServer.
-
-    Returns:
-      A ThrottledHttpRpcServer instance.
-    """
-    kwargs['account_type'] = 'HOSTED_OR_GOOGLE'
-    kwargs['save_cookies'] = True
-    return ThrottledHttpRpcServer(throttle, request_manager, *args, **kwargs)
-  return MakeRpcServer
-
-
-class ExportResult(object):
-  """Holds the decoded content for the result of an export requests."""
+      key_range = KeyRange()
+
+      yield self.key_range_item_factory(self.request_manager,
+                                        self.progress_queue,
+                                        self.kind,
+                                        key_range)
+
+
+class DownloadResult(object):
+  """Holds the result of an entity download."""
 
   def __init__(self, continued, direction, keys, entities):
     self.continued = continued
@@ -905,21 +590,31 @@
     self.entities = entities
     self.count = len(keys)
     assert self.count == len(entities)
-    assert direction in (KeyRange.ASC, KeyRange.DESC)
+    assert direction in (key_range_module.KeyRange.ASC,
+                         key_range_module.KeyRange.DESC)
     if self.count > 0:
-      if direction == KeyRange.ASC:
+      if direction == key_range_module.KeyRange.ASC:
         self.key_start = keys[0]
         self.key_end = keys[-1]
       else:
         self.key_start = keys[-1]
         self.key_end = keys[0]
 
+  def Entities(self):
+    """Returns the list of entities for this result in key order."""
+    if self.direction == key_range_module.KeyRange.ASC:
+      return list(self.entities)
+    else:
+      result = list(self.entities)
+      result.reverse()
+      return result
+
   def __str__(self):
     return 'continued = %s\n%s' % (
-        str(self.continued), '\n'.join(self.entities))
-
-
-class _WorkItem(object):
+        str(self.continued), '\n'.join(str(self.entities)))
+
+
+class _WorkItem(adaptive_thread_pool.WorkItem):
   """Holds a description of a unit of upload or download work."""
 
   def __init__(self, progress_queue, key_start, key_end, state_namer,
@@ -928,20 +623,101 @@
 
     Args:
       progress_queue: A queue used for tracking progress information.
-      key_start: The starting key, inclusive.
-      key_end: The ending key, inclusive.
+      key_start: The start key of the work item.
+      key_end: The end key of the work item.
       state_namer: Function to describe work item states.
       state: The initial state of the work item.
       progress_key: If this WorkItem represents state from a prior run,
         then this will be the key within the progress database.
     """
+    adaptive_thread_pool.WorkItem.__init__(self,
+                                           '[%s-%s]' % (key_start, key_end))
     self.progress_queue = progress_queue
-    self.key_start = key_start
-    self.key_end = key_end
     self.state_namer = state_namer
     self.state = state
     self.progress_key = progress_key
     self.progress_event = threading.Event()
+    self.key_start = key_start
+    self.key_end = key_end
+    self.error = None
+    self.traceback = None
+
+  def _TransferItem(self, thread_pool):
+    raise NotImplementedError()
+
+  def SetError(self):
+    """Sets the error and traceback information for this thread.
+
+    This must be called from an exception handler.
+    """
+    if not self.error:
+      exc_info = sys.exc_info()
+      self.error = exc_info[1]
+      self.traceback = exc_info[2]
+
+  def PerformWork(self, thread_pool):
+    """Perform the work of this work item and report the results.
+
+    Args:
+      thread_pool: An AdaptiveThreadPool instance.
+
+    Returns:
+      A tuple (status, instruction) of the work status and an instruction
+      for the ThreadGate.
+    """
+    status = adaptive_thread_pool.WorkItem.FAILURE
+    instruction = adaptive_thread_pool.ThreadGate.DECREASE
+
+    try:
+      self.MarkAsTransferring()
+
+      try:
+        transfer_time = self._TransferItem(thread_pool)
+        if transfer_time is None:
+          status = adaptive_thread_pool.WorkItem.RETRY
+          instruction = adaptive_thread_pool.ThreadGate.HOLD
+        else:
+          logger.debug('[%s] %s Transferred %d entities in %0.1f seconds',
+                       threading.currentThread().getName(), self, self.count,
+                       transfer_time)
+          sys.stdout.write('.')
+          sys.stdout.flush()
+          status = adaptive_thread_pool.WorkItem.SUCCESS
+          if transfer_time <= MAXIMUM_INCREASE_DURATION:
+            instruction = adaptive_thread_pool.ThreadGate.INCREASE
+          elif transfer_time <= MAXIMUM_HOLD_DURATION:
+            instruction = adaptive_thread_pool.ThreadGate.HOLD
+      except (db.InternalError, db.NotSavedError, db.Timeout,
+              db.TransactionFailedError,
+              apiproxy_errors.OverQuotaError,
+              apiproxy_errors.DeadlineExceededError,
+              apiproxy_errors.ApplicationError), e:
+        status = adaptive_thread_pool.WorkItem.RETRY
+        logger.exception('Retrying on non-fatal datastore error: %s', e)
+      except urllib2.HTTPError, e:
+        http_status = e.code
+        if http_status == 403 or (http_status >= 500 and http_status < 600):
+          status = adaptive_thread_pool.WorkItem.RETRY
+          logger.exception('Retrying on non-fatal HTTP error: %d %s',
+                           http_status, e.msg)
+        else:
+          self.SetError()
+          status = adaptive_thread_pool.WorkItem.FAILURE
+      except urllib2.URLError, e:
+        if IsURLErrorFatal(e):
+          self.SetError()
+          status = adaptive_thread_pool.WorkItem.FAILURE
+        else:
+          status = adaptive_thread_pool.WorkItem.RETRY
+          logger.exception('Retrying on non-fatal URL error: %s', e.reason)
+
+    finally:
+      if status == adaptive_thread_pool.WorkItem.SUCCESS:
+        self.MarkAsTransferred()
+      else:
+        self.MarkAsError()
+
+    return (status, instruction)
 
   def _AssertInState(self, *states):
     """Raises an Error if the state of this range is not in states."""
@@ -963,7 +739,7 @@
 
   def MarkAsTransferring(self):
     """Mark this _WorkItem as transferring, updating the progress database."""
-    self._AssertInState(STATE_READ, STATE_NOT_GOT)
+    self._AssertInState(STATE_READ, STATE_ERROR)
     self._AssertProgressKey()
     self._StateTransition(STATE_GETTING, blocking=True)
 
@@ -975,7 +751,7 @@
     """Mark this _WorkItem as failed, updating the progress database."""
     self._AssertInState(STATE_GETTING)
     self._AssertProgressKey()
-    self._StateTransition(STATE_NOT_GOT, blocking=True)
+    self._StateTransition(STATE_ERROR, blocking=True)
 
   def _StateTransition(self, new_state, blocking=False):
     """Transition the work item to a new state, storing progress information.
@@ -998,12 +774,12 @@
 
 
 
-class WorkItem(_WorkItem):
+class UploadWorkItem(_WorkItem):
   """Holds a unit of uploading work.
 
-  A WorkItem represents a number of entities that need to be uploaded to
+  A UploadWorkItem represents a number of entities that need to be uploaded to
   Google App Engine. These entities are encoded in the "content" field of
-  the WorkItem, and will be POST'd as-is to the server.
+  the UploadWorkItem, and will be POST'd as-is to the server.
 
   The entities are identified by a range of numeric keys, inclusively. In
   the case of a resumption of an upload, or a replay to correct errors,
@@ -1013,16 +789,17 @@
   fill the entire range, they must simply bound a range of valid keys.
   """
 
-  def __init__(self, progress_queue, rows, key_start, key_end,
+  def __init__(self, request_manager, progress_queue, rows, key_start, key_end,
                progress_key=None):
-    """Initialize the WorkItem instance.
+    """Initialize the UploadWorkItem instance.
 
     Args:
+      request_manager: A RequestManager instance.
       progress_queue: A queue used for tracking progress information.
       rows: A list of pairs of a line number and a list of column values
       key_start: The (numeric) starting key, inclusive.
       key_end: The (numeric) ending key, inclusive.
-      progress_key: If this WorkItem represents state from a prior run,
+      progress_key: If this UploadWorkItem represents state from a prior run,
         then this will be the key within the progress database.
     """
     _WorkItem.__init__(self, progress_queue, key_start, key_end,
@@ -1033,6 +810,7 @@
     assert isinstance(key_end, (int, long))
     assert key_start <= key_end
 
+    self.request_manager = request_manager
     self.rows = rows
     self.content = None
     self.count = len(rows)
@@ -1040,8 +818,24 @@
   def __str__(self):
     return '[%s-%s]' % (self.key_start, self.key_end)
 
+  def _TransferItem(self, thread_pool, get_time=time.time):
+    """Transfers the entities associated with an item.
+
+    Args:
+      thread_pool: An AdaptiveThreadPool instance.
+      get_time: Used for dependency injection.
+    """
+    t = get_time()
+    if not self.content:
+      self.content = self.request_manager.EncodeContent(self.rows)
+    try:
+      self.request_manager.PostEntities(self.content)
+    except:
+      raise
+    return get_time() - t
+
   def MarkAsTransferred(self):
-    """Mark this WorkItem as sucessfully-sent to the server."""
+    """Mark this UploadWorkItem as sucessfully-sent to the server."""
 
     self._AssertInState(STATE_SENDING)
     self._AssertProgressKey()
@@ -1068,45 +862,31 @@
     implementation_class = db.class_for_kind(kind_or_class_key)
   return implementation_class
 
-class EmptyQuery(db.Query):
-  def get(self):
-    return None
-
-  def fetch(self, limit=1000, offset=0):
-    return []
-
-  def count(self, limit=1000):
-    return 0
-
 
 def KeyLEQ(key1, key2):
   """Compare two keys for less-than-or-equal-to.
 
-  All keys with numeric ids come before all keys with names.
+  All keys with numeric ids come before all keys with names. None represents
+  an unbounded end-point so it is both greater and less than any other key.
 
   Args:
-    key1: An int or db.Key instance.
-    key2: An int or db.Key instance.
+    key1: An int or datastore.Key instance.
+    key2: An int or datastore.Key instance.
 
   Returns:
     True if key1 <= key2
   """
-  if isinstance(key1, int) and isinstance(key2, int):
-    return key1 <= key2
   if key1 is None or key2 is None:
     return True
-  if key1.id() and not key2.id():
-    return True
-  return key1.id_or_name() <= key2.id_or_name()
-
-
-class KeyRange(_WorkItem):
-  """Represents an item of download work.
-
-  A KeyRange object represents a key range (key_start, key_end) and a
-  scan direction (KeyRange.DESC or KeyRange.ASC).  The KeyRange object
-  has an associated state: STATE_READ, STATE_GETTING, STATE_GOT, and
-  STATE_ERROR.
+  return key1 <= key2
+
+
+class KeyRangeItem(_WorkItem):
+  """Represents an item of work that scans over a key range.
+
+  A KeyRangeItem object represents holds a KeyRange
+  and has an associated state: STATE_READ, STATE_GETTING, STATE_GOT,
+  and STATE_ERROR.
 
   - STATE_READ indicates the range ready to be downloaded by a worker thread.
   - STATE_GETTING indicates the range is currently being downloaded.
@@ -1114,280 +894,143 @@
   - STATE_ERROR indicates that an error occurred during the last download
     attempt
 
-  KeyRanges not in the STATE_GOT state are stored in the progress database.
-  When a piece of KeyRange work is downloaded, the download may cover only
-  a portion of the range.  In this case, the old KeyRange is removed from
+  KeyRangeItems not in the STATE_GOT state are stored in the progress database.
+  When a piece of KeyRangeItem work is downloaded, the download may cover only
+  a portion of the range.  In this case, the old KeyRangeItem is removed from
   the progress database and ranges covering the undownloaded range are
   generated and stored as STATE_READ in the export progress database.
   """
 
-  DESC = 0
-  ASC = 1
-
-  MAX_KEY_LEN = 500
-
   def __init__(self,
+               request_manager,
                progress_queue,
                kind,
-               direction,
-               key_start=None,
-               key_end=None,
-               include_start=True,
-               include_end=True,
+               key_range,
                progress_key=None,
                state=STATE_READ):
-    """Initialize a KeyRange object.
+    """Initialize a KeyRangeItem object.
 
     Args:
+      request_manager: A RequestManager instance.
       progress_queue: A queue used for tracking progress information.
       kind: The kind of entities for this range.
-      direction: The direction of the query for this range.
-      key_start: The starting key for this range.
-      key_end: The ending key for this range.
-      include_start: Whether the start key should be included in the range.
-      include_end: Whether the end key should be included in the range.
+      key_range: A KeyRange instance for this work item.
       progress_key: The key for this range within the progress database.
       state: The initial state of this range.
-
-    Raises:
-      KeyRangeError: if key_start is None.
     """
-    assert direction in (KeyRange.ASC, KeyRange.DESC)
-    _WorkItem.__init__(self, progress_queue, key_start, key_end,
-                       ExportStateName, state=state, progress_key=progress_key)
+    _WorkItem.__init__(self, progress_queue, key_range.key_start,
+                       key_range.key_end, ExportStateName, state=state,
+                       progress_key=progress_key)
+    self.request_manager = request_manager
     self.kind = kind
-    self.direction = direction
-    self.export_result = None
+    self.key_range = key_range
+    self.download_result = None
     self.count = 0
-    self.include_start = include_start
-    self.include_end = include_end
-    self.SPLIT_KEY = db.Key.from_path(self.kind, unichr(0))
+    self.key_start = key_range.key_start
+    self.key_end = key_range.key_end
 
   def __str__(self):
-    return '[%s-%s]' % (PrettyKey(self.key_start), PrettyKey(self.key_end))
+    return str(self.key_range)
 
   def __repr__(self):
     return self.__str__()
 
   def MarkAsTransferred(self):
-    """Mark this KeyRange as transferred, updating the progress database."""
+    """Mark this KeyRangeItem as transferred, updating the progress database."""
     pass
 
-  def Process(self, export_result, num_threads, batch_size, work_queue):
-    """Mark this KeyRange as success, updating the progress database.
-
-    Process will split this KeyRange based on the content of export_result and
-    adds the unfinished ranges to the work queue.
+  def Process(self, download_result, thread_pool, batch_size,
+              new_state=STATE_GOT):
+    """Mark this KeyRangeItem as success, updating the progress database.
+
+    Process will split this KeyRangeItem based on the content of
+    download_result and adds the unfinished ranges to the work queue.
 
     Args:
-      export_result: An ExportResult instance.
-      num_threads: The number of threads for parallel transfers.
+      download_result: A DownloadResult instance.
+      thread_pool: An AdaptiveThreadPool instance.
       batch_size: The number of entities to transfer per request.
-      work_queue: The work queue to add unfinished ranges to.
-
-    Returns:
-      A list of KeyRanges representing undownloaded datastore key ranges.
+      new_state: The state to transition the completed range to.
     """
     self._AssertInState(STATE_GETTING)
     self._AssertProgressKey()
 
-    self.export_result = export_result
-    self.count = len(export_result.keys)
-    if export_result.continued:
-      self._FinishedRange()._StateTransition(STATE_GOT, blocking=True)
-      self._AddUnfinishedRanges(num_threads, batch_size, work_queue)
+    self.download_result = download_result
+    self.count = len(download_result.keys)
+    if download_result.continued:
+      self._FinishedRange()._StateTransition(new_state, blocking=True)
+      self._AddUnfinishedRanges(thread_pool, batch_size)
     else:
-      self._StateTransition(STATE_GOT, blocking=True)
+      self._StateTransition(new_state, blocking=True)
 
   def _FinishedRange(self):
-    """Returns the range completed by the export_result.
-
-    Returns:
-      A KeyRange representing a completed range.
-    """
-    assert self.export_result is not None
-
-    if self.direction == KeyRange.ASC:
-      key_start = self.key_start
-      if self.export_result.continued:
-        key_end = self.export_result.key_end
-      else:
-        key_end = self.key_end
-    else:
-      key_end = self.key_end
-      if self.export_result.continued:
-        key_start = self.export_result.key_start
-      else:
-        key_start = self.key_start
-
-    result = KeyRange(self.progress_queue,
-                      self.kind,
-                      key_start=key_start,
-                      key_end=key_end,
-                      direction=self.direction)
-
-    result.progress_key = self.progress_key
-    result.export_result = self.export_result
-    result.state = self.state
-    result.count = self.count
-    return result
-
-  def FilterQuery(self, query):
-    """Add query filter to restrict to this key range.
-
-    Args:
-      query: A db.Query instance.
-    """
-    if self.key_start == self.key_end and not (
-        self.include_start or self.include_end):
-      return EmptyQuery()
-    if self.include_start:
-      start_comparator = '>='
-    else:
-      start_comparator = '>'
-    if self.include_end:
-      end_comparator = '<='
-    else:
-      end_comparator = '<'
-    if self.key_start and self.key_end:
-      query.filter('__key__ %s' % start_comparator, self.key_start)
-      query.filter('__key__ %s' % end_comparator, self.key_end)
-    elif self.key_start:
-      query.filter('__key__ %s' % start_comparator, self.key_start)
-    elif self.key_end:
-      query.filter('__key__ %s' % end_comparator, self.key_end)
-
-    return query
-
-  def MakeParallelQuery(self):
-    """Construct a query for this key range, for parallel downloading.
-
-    Returns:
-      A db.Query instance.
-
-    Raises:
-      KeyRangeError: if self.direction is not one of
-        KeyRange.ASC, KeyRange.DESC
-    """
-    if self.direction == KeyRange.ASC:
-      direction = ''
-    elif self.direction == KeyRange.DESC:
-      direction = '-'
-    else:
-      raise KeyRangeError('KeyRange direction unexpected: %s', self.direction)
-    query = db.Query(GetImplementationClass(self.kind))
-    query.order('%s__key__' % direction)
-
-    return self.FilterQuery(query)
-
-  def MakeSerialQuery(self):
-    """Construct a query for this key range without descending __key__ scan.
+    """Returns the range completed by the download_result.
 
     Returns:
-      A db.Query instance.
+      A KeyRangeItem representing a completed range.
     """
-    query = db.Query(GetImplementationClass(self.kind))
-    query.order('__key__')
-
-    return self.FilterQuery(query)
-
-  def _BisectStringRange(self, start, end):
-    if start == end:
-      return (start, start, end)
-    start += '\0'
-    end += '\0'
-    midpoint = []
-    expected_max = 127
-    for i in xrange(min(len(start), len(end))):
-      if start[i] == end[i]:
-        midpoint.append(start[i])
+    assert self.download_result is not None
+
+    if self.key_range.direction == key_range_module.KeyRange.ASC:
+      key_start = self.key_range.key_start
+      if self.download_result.continued:
+        key_end = self.download_result.key_end
       else:
-        ord_sum = ord(start[i]) + ord(end[i])
-        midpoint.append(unichr(ord_sum / 2))
-        if ord_sum % 2:
-          if len(start) > i + 1:
-            ord_start = ord(start[i+1])
-          else:
-            ord_start = 0
-          if ord_start < expected_max:
-            ord_split = (expected_max + ord_start) / 2
-          else:
-            ord_split = (0xFFFF + ord_start) / 2
-          midpoint.append(unichr(ord_split))
-        break
-    return (start[:-1], ''.join(midpoint), end[:-1])
-
-  def SplitRange(self, key_start, include_start, key_end, include_end,
-                 export_result, num_threads, batch_size, work_queue):
-    """Split the key range [key_start, key_end] into a list of ranges."""
-    if export_result.direction == KeyRange.ASC:
-      key_start = export_result.key_end
-      include_start = False
+        key_end = self.key_range.key_end
     else:
-      key_end = export_result.key_start
-      include_end = False
-    key_pairs = []
-    if not key_start:
-      key_pairs.append((key_start, include_start, key_end, include_end,
-                        KeyRange.ASC))
-    elif not key_end:
-      key_pairs.append((key_start, include_start, key_end, include_end,
-                        KeyRange.DESC))
-    elif work_queue.qsize() > 2 * num_threads:
-      key_pairs.append((key_start, include_start, key_end, include_end,
-                        KeyRange.ASC))
-    elif key_start.id() and key_end.id():
-      if key_end.id() - key_start.id() > batch_size:
-        key_half = db.Key.from_path(self.kind,
-                                    (key_start.id() + key_end.id()) / 2)
-        key_pairs.append((key_start, include_start,
-                          key_half, True,
-                          KeyRange.DESC))
-        key_pairs.append((key_half, False,
-                          key_end, include_end,
-                          KeyRange.ASC))
+      key_end = self.key_range.key_end
+      if self.download_result.continued:
+        key_start = self.download_result.key_start
       else:
-        key_pairs.append((key_start, include_start, key_end, include_end,
-                          KeyRange.ASC))
-    elif key_start.name() and key_end.name():
-      (start, middle, end) = self._BisectStringRange(key_start.name(),
-                                                     key_end.name())
-      key_pairs.append((key_start, include_start,
-                        db.Key.from_path(self.kind, middle), True,
-                        KeyRange.DESC))
-      key_pairs.append((db.Key.from_path(self.kind, middle), False,
-                        key_end, include_end,
-                        KeyRange.ASC))
+        key_start = self.key_range.key_start
+
+    key_range = KeyRange(key_start=key_start,
+                         key_end=key_end,
+                         direction=self.key_range.direction)
+
+    result = self.__class__(self.request_manager,
+                            self.progress_queue,
+                            self.kind,
+                            key_range,
+                            progress_key=self.progress_key,
+                            state=self.state)
+
+    result.download_result = self.download_result
+    result.count = self.count
+    return result
+
+  def _SplitAndAddRanges(self, thread_pool, batch_size):
+    """Split the key range [key_start, key_end] into a list of ranges."""
+    if self.download_result.direction == key_range_module.KeyRange.ASC:
+      key_range = KeyRange(
+          key_start=self.download_result.key_end,
+          key_end=self.key_range.key_end,
+          include_start=False)
     else:
-      assert key_start.id() and key_end.name()
-      key_pairs.append((key_start, include_start,
-                        self.SPLIT_KEY, False,
-                        KeyRange.DESC))
-      key_pairs.append((self.SPLIT_KEY, True,
-                        key_end, include_end,
-                        KeyRange.ASC))
-
-    ranges = [KeyRange(self.progress_queue,
-                       self.kind,
-                       key_start=start,
-                       include_start=include_start,
-                       key_end=end,
-                       include_end=include_end,
-                       direction=direction)
-              for (start, include_start, end, include_end, direction)
-              in key_pairs]
+      key_range = KeyRange(
+          key_start=self.key_range.key_start,
+          key_end=self.download_result.key_start,
+          include_end=False)
+
+    if thread_pool.QueuedItemCount() > 2 * thread_pool.num_threads():
+      ranges = [key_range]
+    else:
+      ranges = key_range.split_range(batch_size=batch_size)
 
     for key_range in ranges:
-      key_range.MarkAsRead()
-      work_queue.put(key_range, block=True)
-
-  def _AddUnfinishedRanges(self, num_threads, batch_size, work_queue):
-    """Adds incomplete KeyRanges to the work_queue.
+      key_range_item = self.__class__(self.request_manager,
+                                      self.progress_queue,
+                                      self.kind,
+                                      key_range)
+      key_range_item.MarkAsRead()
+      thread_pool.SubmitItem(key_range_item, block=True)
+
+  def _AddUnfinishedRanges(self, thread_pool, batch_size):
+    """Adds incomplete KeyRanges to the thread_pool.
 
     Args:
-      num_threads: The number of threads for parallel transfers.
+      thread_pool: An AdaptiveThreadPool instance.
       batch_size: The number of entities to transfer per request.
-      work_queue: The work queue to add unfinished ranges to.
 
     Returns:
       A list of KeyRanges representing incomplete datastore key ranges.
@@ -1395,15 +1038,43 @@
     Raises:
       KeyRangeError: if this key range has already been completely transferred.
     """
-    assert self.export_result is not None
-    if self.export_result.continued:
-      self.SplitRange(self.key_start, self.include_start, self.key_end,
-                      self.include_end, self.export_result,
-                      num_threads, batch_size, work_queue)
+    assert self.download_result is not None
+    if self.download_result.continued:
+      self._SplitAndAddRanges(thread_pool, batch_size)
     else:
       raise KeyRangeError('No unfinished part of key range.')
 
 
+class DownloadItem(KeyRangeItem):
+  """A KeyRangeItem for downloading key ranges."""
+
+  def _TransferItem(self, thread_pool, get_time=time.time):
+    """Transfers the entities associated with an item."""
+    t = get_time()
+    download_result = self.request_manager.GetEntities(self)
+    transfer_time = get_time() - t
+    self.Process(download_result, thread_pool,
+                 self.request_manager.batch_size)
+    return transfer_time
+
+
+class MapperItem(KeyRangeItem):
+  """A KeyRangeItem for mapping over key ranges."""
+
+  def _TransferItem(self, thread_pool, get_time=time.time):
+    t = get_time()
+    download_result = self.request_manager.GetEntities(self)
+    transfer_time = get_time() - t
+    mapper = self.request_manager.GetMapper()
+    try:
+      mapper.batch_apply(download_result.Entities())
+    except MapperRetry:
+      return None
+    self.Process(download_result, thread_pool,
+                 self.request_manager.batch_size)
+    return transfer_time
+
+
 class RequestManager(object):
   """A class which wraps a connection to the server."""
 
@@ -1416,7 +1087,8 @@
                batch_size,
                secure,
                email,
-               passin):
+               passin,
+               dry_run=False):
     """Initialize a RequestManager object.
 
     Args:
@@ -1445,23 +1117,39 @@
     self.parallel_download = True
     self.email = email
     self.passin = passin
-    throttled_rpc_server_factory = ThrottledHttpRpcServerFactory(
-        self.throttle, self)
+    self.mapper = None
+    self.dry_run = dry_run
+
+    if self.dry_run:
+      logger.info('Running in dry run mode, skipping remote_api setup')
+      return
+
     logger.debug('Configuring remote_api. url_path = %s, '
                  'servername = %s' % (url_path, host_port))
+
+    def CookieHttpRpcServer(*args, **kwargs):
+      kwargs['save_cookies'] = True
+      kwargs['account_type'] = 'HOSTED_OR_GOOGLE'
+      return appengine_rpc.HttpRpcServer(*args, **kwargs)
+
     remote_api_stub.ConfigureRemoteDatastore(
         app_id,
         url_path,
         self.AuthFunction,
         servername=host_port,
-        rpc_server_factory=throttled_rpc_server_factory,
+        rpc_server_factory=CookieHttpRpcServer,
         secure=self.secure)
+    remote_api_throttle.ThrottleRemoteDatastore(self.throttle)
     logger.debug('Bulkloader using app_id: %s', os.environ['APPLICATION_ID'])
 
   def Authenticate(self):
     """Invoke authentication if necessary."""
-    logger.info('Connecting to %s', self.url_path)
-    self.rpc_server.Send(self.url_path, payload=None)
+    logger.info('Connecting to %s%s', self.host_port, self.url_path)
+    if self.dry_run:
+      self.authenticated = True
+      return
+
+    remote_api_stub.MaybeInvokeAuthentication()
     self.authenticated = True
 
   def AuthFunction(self,
@@ -1506,7 +1194,7 @@
       loader: Used for dependency injection.
 
     Returns:
-      A list of db.Model instances.
+      A list of datastore.Entity instances.
 
     Raises:
       ConfigurationError: if no loader is defined for self.kind
@@ -1520,77 +1208,112 @@
     entities = []
     for line_number, values in rows:
       key = loader.generate_key(line_number, values)
-      if isinstance(key, db.Key):
+      if isinstance(key, datastore.Key):
         parent = key.parent()
         key = key.name()
       else:
         parent = None
       entity = loader.create_entity(values, key_name=key, parent=parent)
+
+      def ToEntity(entity):
+        if isinstance(entity, db.Model):
+          return entity._populate_entity()
+        else:
+          return entity
+
       if isinstance(entity, list):
-        entities.extend(entity)
+        entities.extend(map(ToEntity, entity))
       elif entity:
-        entities.append(entity)
+        entities.append(ToEntity(entity))
 
     return entities
 
-  def PostEntities(self, item):
+  def PostEntities(self, entities):
     """Posts Entity records to a remote endpoint over HTTP.
 
     Args:
-      item: A workitem containing the entities to post.
-
-    Returns:
-      A pair of the estimated size of the request in bytes and the response
-        from the server as a str.
+      entities: A list of datastore entities.
     """
-    entities = item.content
-    db.put(entities)
-
-  def GetEntities(self, key_range):
+    if self.dry_run:
+      return
+    datastore.Put(entities)
+
+  def _QueryForPbs(self, query):
+    """Perform the given query and return a list of entity_pb's."""
+    try:
+      query_pb = query._ToPb(limit=self.batch_size)
+      result_pb = datastore_pb.QueryResult()
+      apiproxy_stub_map.MakeSyncCall('datastore_v3', 'RunQuery', query_pb,
+                                     result_pb)
+      next_pb = datastore_pb.NextRequest()
+      next_pb.set_count(self.batch_size)
+      next_pb.mutable_cursor().CopyFrom(result_pb.cursor())
+      result_pb = datastore_pb.QueryResult()
+      apiproxy_stub_map.MakeSyncCall('datastore_v3', 'Next', next_pb, result_pb)
+      return result_pb.result_list()
+    except apiproxy_errors.ApplicationError, e:
+      raise datastore._ToDatastoreError(e)
+
+  def GetEntities(self, key_range_item, key_factory=datastore.Key):
     """Gets Entity records from a remote endpoint over HTTP.
 
     Args:
-     key_range: Range of keys to get.
+     key_range_item: Range of keys to get.
+     key_factory: Used for dependency injection.
 
     Returns:
-      An ExportResult instance.
+      A DownloadResult instance.
 
     Raises:
       ConfigurationError: if no Exporter is defined for self.kind
     """
-    try:
-      Exporter.RegisteredExporter(self.kind)
-    except KeyError:
-      raise ConfigurationError('No Exporter defined for kind %s.' % self.kind)
-
     keys = []
     entities = []
 
     if self.parallel_download:
-      query = key_range.MakeParallelQuery()
+      query = key_range_item.key_range.make_directed_datastore_query(self.kind)
       try:
-        results = query.fetch(self.batch_size)
+        results = self._QueryForPbs(query)
       except datastore_errors.NeedIndexError:
         logger.info('%s: No descending index on __key__, '
                     'performing serial download', self.kind)
         self.parallel_download = False
 
     if not self.parallel_download:
-      key_range.direction = KeyRange.ASC
-      query = key_range.MakeSerialQuery()
-      results = query.fetch(self.batch_size)
+      key_range_item.key_range.direction = key_range_module.KeyRange.ASC
+      query = key_range_item.key_range.make_ascending_datastore_query(self.kind)
+      results = self._QueryForPbs(query)
 
     size = len(results)
 
-    for model in results:
-      key = model.key()
-      entities.append(cPickle.dumps(model))
+    for entity in results:
+      key = key_factory()
+      key._Key__reference = entity.key()
+      entities.append(entity)
       keys.append(key)
 
     continued = (size == self.batch_size)
-    key_range.count = size
-
-    return ExportResult(continued, key_range.direction, keys, entities)
+    key_range_item.count = size
+
+    return DownloadResult(continued, key_range_item.key_range.direction,
+                          keys, entities)
+
+  def GetMapper(self):
+    """Returns a mapper for the registered kind.
+
+    Returns:
+      A Mapper instance.
+
+    Raises:
+      ConfigurationError: if no Mapper is defined for self.kind
+    """
+    if not self.mapper:
+      try:
+        self.mapper = Mapper.RegisteredMapper(self.kind)
+      except KeyError:
+        logger.error('No Mapper defined for kind %s.' % self.kind)
+        raise ConfigurationError('No Mapper defined for kind %s.' % self.kind)
+    return self.mapper
 
 
 def InterruptibleSleep(sleep_time):
@@ -1611,357 +1334,6 @@
       return
 
 
-class ThreadGate(object):
-  """Manage the number of active worker threads.
-
-  The ThreadGate limits the number of threads that are simultaneously
-  uploading batches of records in order to implement adaptive rate
-  control.  The number of simultaneous upload threads that it takes to
-  start causing timeout varies widely over the course of the day, so
-  adaptive rate control allows the uploader to do many uploads while
-  reducing the error rate and thus increasing the throughput.
-
-  Initially the ThreadGate allows only one uploader thread to be active.
-  For each successful upload, another thread is activated and for each
-  failed upload, the number of active threads is reduced by one.
-  """
-
-  def __init__(self, enabled,
-               threshhold1=MAXIMUM_INCREASE_DURATION,
-               threshhold2=MAXIMUM_HOLD_DURATION,
-               sleep=InterruptibleSleep):
-    """Constructor for ThreadGate instances.
-
-    Args:
-      enabled: Whether the thread gate is enabled
-      threshhold1: Maximum duration (in seconds) for a transfer to increase
-        the number of active threads.
-      threshhold2: Maximum duration (in seconds) for a transfer to not decrease
-        the number of active threads.
-    """
-    self.enabled = enabled
-    self.enabled_count = 1
-    self.lock = threading.Lock()
-    self.thread_semaphore = threading.Semaphore(self.enabled_count)
-    self._threads = []
-    self.backoff_time = 0
-    self.sleep = sleep
-    self.threshhold1 = threshhold1
-    self.threshhold2 = threshhold2
-
-  def Register(self, thread):
-    """Register a thread with the thread gate."""
-    self._threads.append(thread)
-
-  def Threads(self):
-    """Yields the registered threads."""
-    for thread in self._threads:
-      yield thread
-
-  def EnableThread(self):
-    """Enable one more worker thread."""
-    self.lock.acquire()
-    try:
-      self.enabled_count += 1
-    finally:
-      self.lock.release()
-    self.thread_semaphore.release()
-
-  def EnableAllThreads(self):
-    """Enable all worker threads."""
-    for unused_idx in xrange(len(self._threads) - self.enabled_count):
-      self.EnableThread()
-
-  def StartWork(self):
-    """Starts a critical section in which the number of workers is limited.
-
-    If thread throttling is enabled then this method starts a critical
-    section which allows self.enabled_count simultaneously operating
-    threads. The critical section is ended by calling self.FinishWork().
-    """
-    if self.enabled:
-      self.thread_semaphore.acquire()
-      if self.backoff_time > 0.0:
-        if not threading.currentThread().exit_flag:
-          logger.info('Backing off: %.1f seconds',
-                      self.backoff_time)
-        self.sleep(self.backoff_time)
-
-  def FinishWork(self):
-    """Ends a critical section started with self.StartWork()."""
-    if self.enabled:
-      self.thread_semaphore.release()
-
-  def TransferSuccess(self, duration):
-    """Informs the throttler that an item was successfully sent.
-
-    If thread throttling is enabled and the duration is low enough, this
-    method will cause an additional thread to run in the critical section.
-
-    Args:
-      duration: The duration of the transfer in seconds.
-    """
-    if duration > self.threshhold2:
-      logger.debug('Transfer took %s, decreasing workers.', duration)
-      self.DecreaseWorkers(backoff=False)
-      return
-    elif duration > self.threshhold1:
-      logger.debug('Transfer took %s, not increasing workers.', duration)
-      return
-    elif self.enabled:
-      if self.backoff_time > 0.0:
-        logger.info('Resetting backoff to 0.0')
-        self.backoff_time = 0.0
-      do_enable = False
-      self.lock.acquire()
-      try:
-        if self.enabled and len(self._threads) > self.enabled_count:
-          do_enable = True
-          self.enabled_count += 1
-      finally:
-        self.lock.release()
-      if do_enable:
-        logger.debug('Increasing active thread count to %d',
-                     self.enabled_count)
-        self.thread_semaphore.release()
-
-  def DecreaseWorkers(self, backoff=True):
-    """Informs the thread_gate that an item failed to send.
-
-    If thread throttling is enabled, this method will cause the
-    throttler to allow one fewer thread in the critical section. If
-    there is only one thread remaining, failures will result in
-    exponential backoff until there is a success.
-
-    Args:
-      backoff: Whether to increase exponential backoff if there is only
-        one thread enabled.
-    """
-    if self.enabled:
-      do_disable = False
-      self.lock.acquire()
-      try:
-        if self.enabled:
-          if self.enabled_count > 1:
-            do_disable = True
-            self.enabled_count -= 1
-          elif backoff:
-            if self.backoff_time == 0.0:
-              self.backoff_time = INITIAL_BACKOFF
-            else:
-              self.backoff_time *= BACKOFF_FACTOR
-      finally:
-        self.lock.release()
-      if do_disable:
-        logger.debug('Decreasing the number of active threads to %d',
-                     self.enabled_count)
-        self.thread_semaphore.acquire()
-
-
-class Throttle(object):
-  """A base class for upload rate throttling.
-
-  Transferring large number of records, too quickly, to an application
-  could trigger quota limits and cause the transfer process to halt.
-  In order to stay within the application's quota, we throttle the
-  data transfer to a specified limit (across all transfer threads).
-  This limit defaults to about half of the Google App Engine default
-  for an application, but can be manually adjusted faster/slower as
-  appropriate.
-
-  This class tracks a moving average of some aspect of the transfer
-  rate (bandwidth, records per second, http connections per
-  second). It keeps two windows of counts of bytes transferred, on a
-  per-thread basis. One block is the "current" block, and the other is
-  the "prior" block. It will rotate the counts from current to prior
-  when ROTATE_PERIOD has passed.  Thus, the current block will
-  represent from 0 seconds to ROTATE_PERIOD seconds of activity
-  (determined by: time.time() - self.last_rotate).  The prior block
-  will always represent a full ROTATE_PERIOD.
-
-  Sleeping is performed just before a transfer of another block, and is
-  based on the counts transferred *before* the next transfer. It really
-  does not matter how much will be transferred, but only that for all the
-  data transferred SO FAR that we have interspersed enough pauses to
-  ensure the aggregate transfer rate is within the specified limit.
-
-  These counts are maintained on a per-thread basis, so we do not require
-  any interlocks around incrementing the counts. There IS an interlock on
-  the rotation of the counts because we do not want multiple threads to
-  multiply-rotate the counts.
-
-  There are various race conditions in the computation and collection
-  of these counts. We do not require precise values, but simply to
-  keep the overall transfer within the bandwidth limits. If a given
-  pause is a little short, or a little long, then the aggregate delays
-  will be correct.
-  """
-
-  ROTATE_PERIOD = 600
-
-  def __init__(self,
-               get_time=time.time,
-               thread_sleep=InterruptibleSleep,
-               layout=None):
-    self.get_time = get_time
-    self.thread_sleep = thread_sleep
-
-    self.start_time = get_time()
-    self.transferred = {}
-    self.prior_block = {}
-    self.totals = {}
-    self.throttles = {}
-
-    self.last_rotate = {}
-    self.rotate_mutex = {}
-    if layout:
-      self.AddThrottles(layout)
-
-  def AddThrottle(self, name, limit):
-    self.throttles[name] = limit
-    self.transferred[name] = {}
-    self.prior_block[name] = {}
-    self.totals[name] = {}
-    self.last_rotate[name] = self.get_time()
-    self.rotate_mutex[name] = threading.Lock()
-
-  def AddThrottles(self, layout):
-    for key, value in layout.iteritems():
-      self.AddThrottle(key, value)
-
-  def Register(self, thread):
-    """Register this thread with the throttler."""
-    thread_name = thread.getName()
-    for throttle_name in self.throttles.iterkeys():
-      self.transferred[throttle_name][thread_name] = 0
-      self.prior_block[throttle_name][thread_name] = 0
-      self.totals[throttle_name][thread_name] = 0
-
-  def VerifyName(self, throttle_name):
-    if throttle_name not in self.throttles:
-      raise AssertionError('%s is not a registered throttle' % throttle_name)
-
-  def AddTransfer(self, throttle_name, token_count):
-    """Add a count to the amount this thread has transferred.
-
-    Each time a thread transfers some data, it should call this method to
-    note the amount sent. The counts may be rotated if sufficient time
-    has passed since the last rotation.
-
-    Note: this method should only be called by the BulkLoaderThread
-    instances. The token count is allocated towards the
-    "current thread".
-
-    Args:
-      throttle_name: The name of the throttle to add to.
-      token_count: The number to add to the throttle counter.
-    """
-    self.VerifyName(throttle_name)
-    transferred = self.transferred[throttle_name]
-    transferred[threading.currentThread().getName()] += token_count
-
-    if self.last_rotate[throttle_name] + self.ROTATE_PERIOD < self.get_time():
-      self._RotateCounts(throttle_name)
-
-  def Sleep(self, throttle_name=None):
-    """Possibly sleep in order to limit the transfer rate.
-
-    Note that we sleep based on *prior* transfers rather than what we
-    may be about to transfer. The next transfer could put us under/over
-    and that will be rectified *after* that transfer. Net result is that
-    the average transfer rate will remain within bounds. Spiky behavior
-    or uneven rates among the threads could possibly bring the transfer
-    rate above the requested limit for short durations.
-
-    Args:
-      throttle_name: The name of the throttle to sleep on.  If None or
-        omitted, then sleep on all throttles.
-    """
-    if throttle_name is None:
-      for throttle_name in self.throttles:
-        self.Sleep(throttle_name=throttle_name)
-      return
-
-    self.VerifyName(throttle_name)
-
-    thread = threading.currentThread()
-
-    while True:
-      duration = self.get_time() - self.last_rotate[throttle_name]
-
-      total = 0
-      for count in self.prior_block[throttle_name].values():
-        total += count
-
-      if total:
-        duration += self.ROTATE_PERIOD
-
-      for count in self.transferred[throttle_name].values():
-        total += count
-
-      sleep_time = (float(total) / self.throttles[throttle_name]) - duration
-
-      if sleep_time < MINIMUM_THROTTLE_SLEEP_DURATION:
-        break
-
-      logger.debug('[%s] Throttling on %s. Sleeping for %.1f ms '
-                   '(duration=%.1f ms, total=%d)',
-                   thread.getName(), throttle_name,
-                   sleep_time * 1000, duration * 1000, total)
-      self.thread_sleep(sleep_time)
-      if thread.exit_flag:
-        break
-      self._RotateCounts(throttle_name)
-
-  def _RotateCounts(self, throttle_name):
-    """Rotate the transfer counters.
-
-    If sufficient time has passed, then rotate the counters from active to
-    the prior-block of counts.
-
-    This rotation is interlocked to ensure that multiple threads do not
-    over-rotate the counts.
-
-    Args:
-      throttle_name: The name of the throttle to rotate.
-    """
-    self.VerifyName(throttle_name)
-    self.rotate_mutex[throttle_name].acquire()
-    try:
-      next_rotate_time = self.last_rotate[throttle_name] + self.ROTATE_PERIOD
-      if next_rotate_time >= self.get_time():
-        return
-
-      for name, count in self.transferred[throttle_name].items():
-
-
-        self.prior_block[throttle_name][name] = count
-        self.transferred[throttle_name][name] = 0
-
-        self.totals[throttle_name][name] += count
-
-      self.last_rotate[throttle_name] = self.get_time()
-
-    finally:
-      self.rotate_mutex[throttle_name].release()
-
-  def TotalTransferred(self, throttle_name):
-    """Return the total transferred, and over what period.
-
-    Args:
-      throttle_name: The name of the throttle to total.
-
-    Returns:
-      A tuple of the total count and running time for the given throttle name.
-    """
-    total = 0
-    for count in self.totals[throttle_name].values():
-      total += count
-    for count in self.transferred[throttle_name].values():
-      total += count
-    return total, self.get_time() - self.start_time
-
-
 class _ThreadBase(threading.Thread):
   """Provide some basic features for the threads used in the uploader.
 
@@ -1993,18 +1365,29 @@
 
     self.exit_flag = False
     self.error = None
+    self.traceback = None
 
   def run(self):
     """Perform the work of the thread."""
-    logger.info('[%s] %s: started', self.getName(), self.__class__.__name__)
+    logger.debug('[%s] %s: started', self.getName(), self.__class__.__name__)
 
     try:
       self.PerformWork()
     except:
-      self.error = sys.exc_info()[1]
+      self.SetError()
       logger.exception('[%s] %s:', self.getName(), self.__class__.__name__)
 
-    logger.info('[%s] %s: exiting', self.getName(), self.__class__.__name__)
+    logger.debug('[%s] %s: exiting', self.getName(), self.__class__.__name__)
+
+  def SetError(self):
+    """Sets the error and traceback information for this thread.
+
+    This must be called from an exception handler.
+    """
+    if not self.error:
+      exc_info = sys.exc_info()
+      self.error = exc_info[1]
+      self.traceback = exc_info[2]
 
   def PerformWork(self):
     """Perform the thread-specific work."""
@@ -2014,6 +1397,10 @@
     """If an error is present, then log it."""
     if self.error:
       logger.error('Error in %s: %s', self.GetFriendlyName(), self.error)
+      if self.traceback:
+        logger.debug(''.join(traceback.format_exception(self.error.__class__,
+                                                        self.error,
+                                                        self.traceback)))
 
   def GetFriendlyName(self):
     """Returns a human-friendly description of the thread."""
@@ -2044,292 +1431,12 @@
   return error.reason[0] not in non_fatal_error_codes
 
 
-def PrettyKey(key):
-  """Returns a nice string representation of the given key."""
-  if key is None:
-    return None
-  elif isinstance(key, db.Key):
-    return repr(key.id_or_name())
-  return str(key)
-
-
-class _BulkWorkerThread(_ThreadBase):
-  """A base class for worker threads.
-
-  This thread will read WorkItem instances from the work_queue and upload
-  the entities to the server application. Progress information will be
-  pushed into the progress_queue as the work is being performed.
-
-  If a _BulkWorkerThread encounters a transient error, the entities will be
-  resent, if a fatal error is encoutered the BulkWorkerThread exits.
-
-  Subclasses must provide implementations for PreProcessItem, TransferItem,
-  and ProcessResponse.
-  """
-
-  def __init__(self,
-               work_queue,
-               throttle,
-               thread_gate,
-               request_manager,
-               num_threads,
-               batch_size,
-               state_message,
-               get_time):
-    """Initialize the BulkLoaderThread instance.
-
-    Args:
-      work_queue: A queue containing WorkItems for processing.
-      throttle: A Throttles to control upload bandwidth.
-      thread_gate: A ThreadGate to control number of simultaneous uploads.
-      request_manager: A RequestManager instance.
-      num_threads: The number of threads for parallel transfers.
-      batch_size: The number of entities to transfer per request.
-      state_message: Used for dependency injection.
-      get_time: Used for dependency injection.
-    """
-    _ThreadBase.__init__(self)
-
-    self.work_queue = work_queue
-    self.throttle = throttle
-    self.thread_gate = thread_gate
-    self.request_manager = request_manager
-    self.num_threads = num_threads
-    self.batch_size = batch_size
-    self.state_message = state_message
-    self.get_time = get_time
-
-  def PreProcessItem(self, item):
-    """Performs pre transfer processing on a work item."""
-    raise NotImplementedError()
-
-  def TransferItem(self, item):
-    """Transfers the entities associated with an item.
-
-    Args:
-      item: An item of upload (WorkItem) or download (KeyRange) work.
-
-    Returns:
-      A tuple of (estimated transfer size, response)
-    """
-    raise NotImplementedError()
-
-  def ProcessResponse(self, item, result):
-    """Processes the response from the server application."""
-    raise NotImplementedError()
-
-  def PerformWork(self):
-    """Perform the work of a _BulkWorkerThread."""
-    while not self.exit_flag:
-      transferred = False
-      self.thread_gate.StartWork()
-      try:
-        try:
-          item = self.work_queue.get(block=True, timeout=1.0)
-        except Queue.Empty:
-          continue
-        if item == _THREAD_SHOULD_EXIT:
-          break
-
-        logger.debug('[%s] Got work item %s', self.getName(), item)
-
-        try:
-
-          item.MarkAsTransferring()
-          self.PreProcessItem(item)
-          response = None
-          try:
-            try:
-              t = self.get_time()
-              response = self.TransferItem(item)
-              status = 200
-              transferred = True
-              transfer_time = self.get_time() - t
-              logger.debug('[%s] %s Transferred %d entities in %0.1f seconds',
-                           self.getName(), item, item.count, transfer_time)
-              self.throttle.AddTransfer(RECORDS, item.count)
-            except (db.InternalError, db.NotSavedError, db.Timeout,
-                    apiproxy_errors.OverQuotaError,
-                    apiproxy_errors.DeadlineExceededError), e:
-              logger.exception('Caught non-fatal datastore error: %s', e)
-            except urllib2.HTTPError, e:
-              status = e.code
-              if status == 403 or (status >= 500 and status < 600):
-                logger.exception('Caught non-fatal HTTP error: %d %s',
-                                 status, e.msg)
-              else:
-                raise e
-            except urllib2.URLError, e:
-              if IsURLErrorFatal(e):
-                raise e
-              else:
-                logger.exception('Caught non-fatal URL error: %s', e.reason)
-
-            self.ProcessResponse(item, response)
-
-          except:
-            self.error = sys.exc_info()[1]
-            logger.exception('[%s] %s: caught exception %s', self.getName(),
-                             self.__class__.__name__, str(sys.exc_info()))
-            raise
-
-        finally:
-          if transferred:
-            item.MarkAsTransferred()
-            self.work_queue.task_done()
-            self.thread_gate.TransferSuccess(transfer_time)
-          else:
-            item.MarkAsError()
-            try:
-              self.work_queue.reput(item, block=False)
-            except Queue.Full:
-              logger.error('[%s] Failed to reput work item.', self.getName())
-              raise Error('Failed to reput work item')
-            self.thread_gate.DecreaseWorkers()
-          logger.info('%s %s',
-                      item,
-                      self.state_message(item.state))
-
-      finally:
-        self.thread_gate.FinishWork()
-
-
-  def GetFriendlyName(self):
-    """Returns a human-friendly name for this thread."""
-    return 'worker [%s]' % self.getName()
-
-
-class BulkLoaderThread(_BulkWorkerThread):
-  """A thread which transmits entities to the server application.
-
-  This thread will read WorkItem instances from the work_queue and upload
-  the entities to the server application. Progress information will be
-  pushed into the progress_queue as the work is being performed.
-
-  If a BulkLoaderThread encounters a transient error, the entities will be
-  resent, if a fatal error is encoutered the BulkLoaderThread exits.
-  """
-
-  def __init__(self,
-               work_queue,
-               throttle,
-               thread_gate,
-               request_manager,
-               num_threads,
-               batch_size,
-               get_time=time.time):
-    """Initialize the BulkLoaderThread instance.
-
-    Args:
-      work_queue: A queue containing WorkItems for processing.
-      throttle: A Throttles to control upload bandwidth.
-      thread_gate: A ThreadGate to control number of simultaneous uploads.
-      request_manager: A RequestManager instance.
-      num_threads: The number of threads for parallel transfers.
-      batch_size: The number of entities to transfer per request.
-      get_time: Used for dependency injection.
-    """
-    _BulkWorkerThread.__init__(self,
-                               work_queue,
-                               throttle,
-                               thread_gate,
-                               request_manager,
-                               num_threads,
-                               batch_size,
-                               ImportStateMessage,
-                               get_time)
-
-  def PreProcessItem(self, item):
-    """Performs pre transfer processing on a work item."""
-    if item and not item.content:
-      item.content = self.request_manager.EncodeContent(item.rows)
-
-  def TransferItem(self, item):
-    """Transfers the entities associated with an item.
-
-    Args:
-      item: An item of upload (WorkItem) work.
-
-    Returns:
-      A tuple of (estimated transfer size, response)
-    """
-    return self.request_manager.PostEntities(item)
-
-  def ProcessResponse(self, item, response):
-    """Processes the response from the server application."""
-    pass
-
-
-class BulkExporterThread(_BulkWorkerThread):
-  """A thread which recieved entities to the server application.
-
-  This thread will read KeyRange instances from the work_queue and export
-  the entities from the server application. Progress information will be
-  pushed into the progress_queue as the work is being performed.
-
-  If a BulkExporterThread encounters an error when trying to post data,
-  the thread will exit and cause the application to terminate.
-  """
-
-  def __init__(self,
-               work_queue,
-               throttle,
-               thread_gate,
-               request_manager,
-               num_threads,
-               batch_size,
-               get_time=time.time):
-
-    """Initialize the BulkExporterThread instance.
-
-    Args:
-      work_queue: A queue containing KeyRanges for processing.
-      throttle: A Throttles to control upload bandwidth.
-      thread_gate: A ThreadGate to control number of simultaneous uploads.
-      request_manager: A RequestManager instance.
-      num_threads: The number of threads for parallel transfers.
-      batch_size: The number of entities to transfer per request.
-      get_time: Used for dependency injection.
-    """
-    _BulkWorkerThread.__init__(self,
-                               work_queue,
-                               throttle,
-                               thread_gate,
-                               request_manager,
-                               num_threads,
-                               batch_size,
-                               ExportStateMessage,
-                               get_time)
-
-  def PreProcessItem(self, unused_item):
-    """Performs pre transfer processing on a work item."""
-    pass
-
-  def TransferItem(self, item):
-    """Transfers the entities associated with an item.
-
-    Args:
-      item: An item of download (KeyRange) work.
-
-    Returns:
-      A tuple of (estimated transfer size, response)
-    """
-    return self.request_manager.GetEntities(item)
-
-  def ProcessResponse(self, item, export_result):
-    """Processes the response from the server application."""
-    if export_result:
-      item.Process(export_result, self.num_threads, self.batch_size,
-                   self.work_queue)
-    item.state = STATE_GOT
-
-
 class DataSourceThread(_ThreadBase):
   """A thread which reads WorkItems and pushes them into queue.
 
   This thread will read/consume WorkItems from a generator (produced by
   the generator factory). These WorkItems will then be pushed into the
-  work_queue. Note that reading will block if/when the work_queue becomes
+  thread_pool. Note that reading will block if/when the thread_pool becomes
   full. Information on content consumed from the generator will be pushed
   into the progress_queue.
   """
@@ -2337,14 +1444,16 @@
   NAME = 'data source thread'
 
   def __init__(self,
-               work_queue,
+               request_manager,
+               thread_pool,
                progress_queue,
                workitem_generator_factory,
                progress_generator_factory):
     """Initialize the DataSourceThread instance.
 
     Args:
-      work_queue: A queue containing WorkItems for processing.
+      request_manager: A RequestManager instance.
+      thread_pool: An AdaptiveThreadPool instance.
       progress_queue: A queue used for tracking progress information.
       workitem_generator_factory: A factory that creates a WorkItem generator
       progress_generator_factory: A factory that creates a generator which
@@ -2353,7 +1462,8 @@
     """
     _ThreadBase.__init__(self)
 
-    self.work_queue = work_queue
+    self.request_manager = request_manager
+    self.thread_pool = thread_pool
     self.progress_queue = progress_queue
     self.workitem_generator_factory = workitem_generator_factory
     self.progress_generator_factory = progress_generator_factory
@@ -2366,7 +1476,8 @@
     else:
       progress_gen = None
 
-    content_gen = self.workitem_generator_factory(self.progress_queue,
+    content_gen = self.workitem_generator_factory(self.request_manager,
+                                                  self.progress_queue,
                                                   progress_gen)
 
     self.xfer_count = 0
@@ -2378,7 +1489,7 @@
 
       while not self.exit_flag:
         try:
-          self.work_queue.put(item, block=True, timeout=1.0)
+          self.thread_pool.SubmitItem(item, block=True, timeout=1.0)
           self.entity_count += item.count
           break
         except Queue.Full:
@@ -2526,6 +1637,70 @@
     self.update_cursor = self.secondary_conn.cursor()
 
 
+zero_matcher = re.compile(r'\x00')
+
+zero_one_matcher = re.compile(r'\x00\x01')
+
+
+def KeyStr(key):
+  """Returns a string to represent a key, preserving ordering.
+
+  Unlike datastore.Key.__str__(), we have the property:
+
+    key1 < key2 ==> KeyStr(key1) < KeyStr(key2)
+
+  The key string is constructed from the key path as follows:
+    (1) Strings are prepended with ':' and numeric id's are padded to
+        20 digits.
+    (2) Any null characters (u'\0') present are replaced with u'\0\1'
+    (3) The sequence u'\0\0' is used to separate each component of the path.
+
+  (1) assures that names and ids compare properly, while (2) and (3) enforce
+  the part-by-part comparison of pieces of the path.
+
+  Args:
+    key: A datastore.Key instance.
+
+  Returns:
+    A string representation of the key, which preserves ordering.
+  """
+  assert isinstance(key, datastore.Key)
+  path = key.to_path()
+
+  out_path = []
+  for part in path:
+    if isinstance(part, (int, long)):
+      part = '%020d' % part
+    else:
+      part = ':%s' % part
+
+    out_path.append(zero_matcher.sub(u'\0\1', part))
+
+  out_str = u'\0\0'.join(out_path)
+
+  return out_str
+
+
+def StrKey(key_str):
+  """The inverse of the KeyStr function.
+
+  Args:
+    key_str: A string in the range of KeyStr.
+
+  Returns:
+    A datastore.Key instance k, such that KeyStr(k) == key_str.
+  """
+  parts = key_str.split(u'\0\0')
+  for i in xrange(len(parts)):
+    if parts[i][0] == ':':
+      part = parts[i][1:]
+      part = zero_one_matcher.sub(u'\0', part)
+      parts[i] = part
+    else:
+      parts[i] = int(parts[i])
+  return datastore.Key.from_path(*parts)
+
+
 class ResultDatabase(_Database):
   """Persistently record all the entities downloaded during an export.
 
@@ -2544,7 +1719,7 @@
     """
     self.complete = False
     create_table = ('create table result (\n'
-                    'id TEXT primary key,\n'
+                    'id BLOB primary key,\n'
                     'value BLOB not null)')
 
     _Database.__init__(self,
@@ -2560,34 +1735,37 @@
       self.existing_count = 0
     self.count = self.existing_count
 
-  def _StoreEntity(self, entity_id, value):
+  def _StoreEntity(self, entity_id, entity):
     """Store an entity in the result database.
 
     Args:
-      entity_id: A db.Key for the entity.
-      value: A string of the contents of the entity.
+      entity_id: A datastore.Key for the entity.
+      entity: The entity to store.
 
     Returns:
       True if this entities is not already present in the result database.
     """
 
     assert _RunningInThread(self.secondary_thread)
-    assert isinstance(entity_id, db.Key)
-
-    entity_id = entity_id.id_or_name()
+    assert isinstance(entity_id, datastore.Key), (
+        'expected a datastore.Key, got a %s' % entity_id.__class__.__name__)
+
+    key_str = buffer(KeyStr(entity_id).encode('utf-8'))
     self.insert_cursor.execute(
-        'select count(*) from result where id = ?', (unicode(entity_id),))
+        'select count(*) from result where id = ?', (key_str,))
+
     already_present = self.insert_cursor.fetchone()[0]
     result = True
     if already_present:
       result = False
       self.insert_cursor.execute('delete from result where id = ?',
-                                 (unicode(entity_id),))
+                                 (key_str,))
     else:
       self.count += 1
+    value = entity.Encode()
     self.insert_cursor.execute(
         'insert into result (id, value) values (?, ?)',
-        (unicode(entity_id), buffer(value)))
+        (key_str, buffer(value)))
     return result
 
   def StoreEntities(self, keys, entities):
@@ -2603,9 +1781,9 @@
     self._OpenSecondaryConnection()
     t = time.time()
     count = 0
-    for entity_id, value in zip(keys,
-                                entities):
-      if self._StoreEntity(entity_id, value):
+    for entity_id, entity in zip(keys,
+                                 entities):
+      if self._StoreEntity(entity_id, entity):
         count += 1
     logger.debug('%s insert: delta=%.3f',
                  self.db_filename,
@@ -2627,7 +1805,8 @@
         'select id, value from result order by id')
 
     for unused_entity_id, entity in cursor:
-      yield cPickle.loads(str(entity))
+      entity_proto = entity_pb.EntityProto(contents=entity)
+      yield datastore.Entity._FromPb(entity_proto)
 
 
 class _ProgressDatabase(_Database):
@@ -2723,9 +1902,16 @@
     self._OpenSecondaryConnection()
 
     assert _RunningInThread(self.secondary_thread)
-    assert not key_start or isinstance(key_start, self.py_type)
-    assert not key_end or isinstance(key_end, self.py_type), '%s is a %s' % (
-        key_end, key_end.__class__)
+    assert (not key_start) or isinstance(key_start, self.py_type), (
+        '%s is a %s, %s expected %s' % (key_start,
+                                        key_start.__class__,
+                                        self.__class__.__name__,
+                                        self.py_type))
+    assert (not key_end) or isinstance(key_end, self.py_type), (
+        '%s is a %s, %s expected %s' % (key_end,
+                                        key_end.__class__,
+                                        self.__class__.__name__,
+                                        self.py_type))
     assert KeyLEQ(key_start, key_end), '%s not less than %s' % (
         repr(key_start), repr(key_end))
 
@@ -2843,7 +2029,7 @@
     _ProgressDatabase.__init__(self,
                                db_filename,
                                'TEXT',
-                               db.Key,
+                               datastore.Key,
                                signature,
                                commit_periodicity=1)
 
@@ -3011,34 +2197,72 @@
     exporter.output_entities(self.result_db.AllEntities())
 
   def UpdateProgress(self, item):
-    """Update the state of the given KeyRange.
+    """Update the state of the given KeyRangeItem.
 
     Args:
       item: A KeyRange instance.
     """
     if item.state == STATE_GOT:
-      count = self.result_db.StoreEntities(item.export_result.keys,
-                                           item.export_result.entities)
+      count = self.result_db.StoreEntities(item.download_result.keys,
+                                           item.download_result.entities)
       self.db.DeleteKey(item.progress_key)
       self.entities_transferred += count
     else:
       self.db.UpdateState(item.progress_key, item.state)
 
 
+class MapperProgressThread(_ProgressThreadBase):
+  """A thread to record progress information for maps over the datastore."""
+
+  def __init__(self, kind, progress_queue, progress_db):
+    """Initialize the MapperProgressThread instance.
+
+    Args:
+      kind: The kind of entities being stored in the database.
+      progress_queue: A Queue used for tracking progress information.
+      progress_db: The database for tracking progress information; should
+        be an instance of ProgressDatabase.
+    """
+    _ProgressThreadBase.__init__(self, progress_queue, progress_db)
+
+    self.kind = kind
+    self.mapper = Mapper.RegisteredMapper(self.kind)
+
+  def EntitiesTransferred(self):
+    """Return the total number of unique entities transferred."""
+    return self.entities_transferred
+
+  def WorkFinished(self):
+    """Perform actions after map is complete."""
+    pass
+
+  def UpdateProgress(self, item):
+    """Update the state of the given KeyRangeItem.
+
+    Args:
+      item: A KeyRangeItem instance.
+    """
+    if item.state == STATE_GOT:
+      self.entities_transferred += item.count
+      self.db.DeleteKey(item.progress_key)
+    else:
+      self.db.UpdateState(item.progress_key, item.state)
+
+
 def ParseKey(key_string):
-  """Turn a key stored in the database into a db.Key or None.
+  """Turn a key stored in the database into a Key or None.
 
   Args:
-    key_string: The string representation of a db.Key.
+    key_string: The string representation of a Key.
 
   Returns:
-    A db.Key instance or None
+    A datastore.Key instance or None
   """
   if not key_string:
     return None
   if key_string == 'None':
     return None
-  return db.Key(encoded=key_string)
+  return datastore.Key(encoded=key_string)
 
 
 def Validate(value, typ):
@@ -3097,9 +2321,7 @@
   def __init__(self, kind, properties):
     """Constructor.
 
-    Populates this Loader's kind and properties map. Also registers it with
-    the bulk loader, so that all you need to do is instantiate your Loader,
-    and the bulkload handler will automatically use it.
+    Populates this Loader's kind and properties map.
 
     Args:
       kind: a string containing the entity kind that this loader handles
@@ -3139,7 +2361,11 @@
 
   @staticmethod
   def RegisterLoader(loader):
-
+    """Register loader and the Loader instance for its kind.
+
+    Args:
+      loader: A Loader instance.
+    """
     Loader.__loaders[loader.kind] = loader
 
   def alias_old_names(self):
@@ -3166,7 +2392,7 @@
     Args:
       values: list/tuple of str
       key_name: if provided, the name for the (single) resulting entity
-      parent: A db.Key instance for the parent, or None
+      parent: A datastore.Key instance for the parent, or None
 
     Returns:
       list of db.Model
@@ -3222,7 +2448,7 @@
     server generated numeric key), or a string which neither starts
     with a digit nor has the form __*__ (see
     http://code.google.com/appengine/docs/python/datastore/keysandentitygroups.html),
-    or a db.Key instance.
+    or a datastore.Key instance.
 
     If you generate your own string keys, keep in mind:
 
@@ -3305,6 +2531,51 @@
     return Loader.__loaders[kind]
 
 
+class RestoreThread(_ThreadBase):
+  """A thread to read saved entity_pbs from sqlite3."""
+  NAME = 'RestoreThread'
+  _ENTITIES_DONE = 'Entities Done'
+
+  def __init__(self, queue, filename):
+    _ThreadBase.__init__(self)
+    self.queue = queue
+    self.filename = filename
+
+  def PerformWork(self):
+    db_conn = sqlite3.connect(self.filename)
+    cursor = db_conn.cursor()
+    cursor.execute('select id, value from result')
+    for entity_id, value in cursor:
+      self.queue.put([entity_id, value], block=True)
+    self.queue.put(RestoreThread._ENTITIES_DONE, block=True)
+
+
+class RestoreLoader(Loader):
+  """A Loader which imports protobuffers from a file."""
+
+  def __init__(self, kind):
+    self.kind = kind
+
+  def initialize(self, filename, loader_opts):
+    CheckFile(filename)
+    self.queue = Queue.Queue(1000)
+    restore_thread = RestoreThread(self.queue, filename)
+    restore_thread.start()
+
+  def generate_records(self, filename):
+    while True:
+      record = self.queue.get(block=True)
+      if id(record) == id(RestoreThread._ENTITIES_DONE):
+        break
+      yield record
+
+  def create_entity(self, values, key_name=None, parent=None):
+    key = StrKey(unicode(values[0], 'utf-8'))
+    entity_proto = entity_pb.EntityProto(contents=str(values[1]))
+    entity_proto.mutable_key().CopyFrom(key._Key__reference)
+    return datastore.Entity._FromPb(entity_proto)
+
+
 class Exporter(object):
   """A base class for serializing datastore entities.
 
@@ -3326,9 +2597,7 @@
   def __init__(self, kind, properties):
     """Constructor.
 
-    Populates this Exporters's kind and properties map. Also registers
-    it so that all you need to do is instantiate your Exporter, and
-    the bulkload handler will automatically use it.
+    Populates this Exporter's kind and properties map.
 
     Args:
       kind: a string containing the entity kind that this exporter handles
@@ -3370,7 +2639,11 @@
 
   @staticmethod
   def RegisterExporter(exporter):
-
+    """Register exporter and the Exporter instance for its kind.
+
+    Args:
+      exporter: An Exporter instance.
+    """
     Exporter.__exporters[exporter.kind] = exporter
 
   def __ExtractProperties(self, entity):
@@ -3388,7 +2661,7 @@
     encoding = []
     for name, fn, default in self.__properties:
       try:
-        encoding.append(fn(getattr(entity, name)))
+        encoding.append(fn(entity[name]))
       except AttributeError:
         if default is None:
           raise MissingPropertyError(name)
@@ -3468,6 +2741,87 @@
     return Exporter.__exporters[kind]
 
 
+class DumpExporter(Exporter):
+  """An exporter which dumps protobuffers to a file."""
+
+  def __init__(self, kind, result_db_filename):
+    self.kind = kind
+    self.result_db_filename = result_db_filename
+
+  def output_entities(self, entity_generator):
+    shutil.copyfile(self.result_db_filename, self.output_filename)
+
+
+class MapperRetry(Error):
+  """An exception that indicates a non-fatal error during mapping."""
+
+
+class Mapper(object):
+  """A base class for applying a map function over datastore entities.
+
+  To add a handler for exporting an entity kind from your datastore,
+  write a subclass of this class that calls Mapper.__init__ from your
+  class's __init__.
+
+  You need to implement the batch_apply or apply method on your subclass
+  for the map to do anything.
+  """
+
+  __mappers = {}
+  kind = None
+
+  def __init__(self, kind):
+    """Constructor.
+
+    Populates this Mapper's kind.
+
+    Args:
+      kind: a string containing the entity kind that this mapper handles
+    """
+    Validate(kind, basestring)
+    self.kind = kind
+
+    GetImplementationClass(kind)
+
+  @staticmethod
+  def RegisterMapper(mapper):
+    """Register mapper and the Mapper instance for its kind.
+
+    Args:
+      mapper: A Mapper instance.
+    """
+    Mapper.__mappers[mapper.kind] = mapper
+
+  def initialize(self, mapper_opts):
+    """Performs initialization.
+
+    Args:
+      mapper_opts: The string given as the --mapper_opts flag argument.
+    """
+    pass
+
+  def finalize(self):
+    """Performs finalization actions after the download completes."""
+    pass
+
+  def apply(self, entity):
+    print 'Default map function doing nothing to %s' % entity
+
+  def batch_apply(self, entities):
+    for entity in entities:
+      self.apply(entity)
+
+  @staticmethod
+  def RegisteredMappers():
+    """Returns a dictionary of the mapper instances that have been created."""
+    return dict(Mapper.__mappers)
+
+  @staticmethod
+  def RegisteredMapper(kind):
+    """Returns a mapper instance for the given kind if it exists."""
+    return Mapper.__mappers[kind]
+
+
 class QueueJoinThread(threading.Thread):
   """A thread that joins a queue and exits.
 
@@ -3492,7 +2846,7 @@
 
 def InterruptibleQueueJoin(queue,
                            thread_local,
-                           thread_gate,
+                           thread_pool,
                            queue_join_thread_factory=QueueJoinThread,
                            check_workers=True):
   """Repeatedly joins the given ReQueue or Queue.Queue with short timeout.
@@ -3502,7 +2856,7 @@
   Args:
     queue: A Queue.Queue or ReQueue instance.
     thread_local: A threading.local instance which indicates interrupts.
-    thread_gate: A ThreadGate instance.
+    thread_pool: An AdaptiveThreadPool instance.
     queue_join_thread_factory: Used for dependency injection.
     check_workers: Whether to interrupt the join on worker death.
 
@@ -3519,41 +2873,29 @@
       logger.debug('Queue join interrupted')
       return False
     if check_workers:
-      for worker_thread in thread_gate.Threads():
+      for worker_thread in thread_pool.Threads():
         if not worker_thread.isAlive():
           return False
 
 
-def ShutdownThreads(data_source_thread, work_queue, thread_gate):
+def ShutdownThreads(data_source_thread, thread_pool):
   """Shuts down the worker and data source threads.
 
   Args:
     data_source_thread: A running DataSourceThread instance.
-    work_queue: The work queue.
-    thread_gate: A ThreadGate instance with workers registered.
+    thread_pool: An AdaptiveThreadPool instance with workers registered.
   """
   logger.info('An error occurred. Shutting down...')
 
   data_source_thread.exit_flag = True
 
-  for thread in thread_gate.Threads():
-    thread.exit_flag = True
-
-  for unused_thread in thread_gate.Threads():
-    thread_gate.EnableThread()
+  thread_pool.Shutdown()
 
   data_source_thread.join(timeout=3.0)
   if data_source_thread.isAlive():
     logger.warn('%s hung while trying to exit',
                 data_source_thread.GetFriendlyName())
 
-  while not work_queue.empty():
-    try:
-      unused_item = work_queue.get_nowait()
-      work_queue.task_done()
-    except Queue.Empty:
-      pass
-
 
 class BulkTransporterApp(object):
   """Class to wrap bulk transport application functionality."""
@@ -3563,13 +2905,12 @@
                input_generator_factory,
                throttle,
                progress_db,
-               workerthread_factory,
                progresstrackerthread_factory,
                max_queue_size=DEFAULT_QUEUE_SIZE,
                request_manager_factory=RequestManager,
                datasourcethread_factory=DataSourceThread,
-               work_queue_factory=ReQueue,
-               progress_queue_factory=Queue.Queue):
+               progress_queue_factory=Queue.Queue,
+               thread_pool_factory=adaptive_thread_pool.AdaptiveThreadPool):
     """Instantiate a BulkTransporterApp.
 
     Uploads or downloads data to or from application using HTTP requests.
@@ -3584,13 +2925,12 @@
       input_generator_factory: A factory that creates a WorkItem generator.
       throttle: A Throttle instance.
       progress_db: The database to use for replaying/recording progress.
-      workerthread_factory: A factory for worker threads.
       progresstrackerthread_factory: Used for dependency injection.
       max_queue_size: Maximum size of the queues before they should block.
       request_manager_factory: Used for dependency injection.
       datasourcethread_factory: Used for dependency injection.
-      work_queue_factory: Used for dependency injection.
       progress_queue_factory: Used for dependency injection.
+      thread_pool_factory: Used for dependency injection.
     """
     self.app_id = arg_dict['app_id']
     self.post_url = arg_dict['url']
@@ -3600,15 +2940,15 @@
     self.num_threads = arg_dict['num_threads']
     self.email = arg_dict['email']
     self.passin = arg_dict['passin']
+    self.dry_run = arg_dict['dry_run']
     self.throttle = throttle
     self.progress_db = progress_db
-    self.workerthread_factory = workerthread_factory
     self.progresstrackerthread_factory = progresstrackerthread_factory
     self.max_queue_size = max_queue_size
     self.request_manager_factory = request_manager_factory
     self.datasourcethread_factory = datasourcethread_factory
-    self.work_queue_factory = work_queue_factory
     self.progress_queue_factory = progress_queue_factory
+    self.thread_pool_factory = thread_pool_factory
     (scheme,
      self.host_port, self.url_path,
      unused_query, unused_fragment) = urlparse.urlsplit(self.post_url)
@@ -3623,13 +2963,13 @@
     Returns:
       Error code suitable for sys.exit, e.g. 0 on success, 1 on failure.
     """
-    thread_gate = ThreadGate(True)
+    self.error = False
+    thread_pool = self.thread_pool_factory(
+        self.num_threads, queue_size=self.max_queue_size)
 
     self.throttle.Register(threading.currentThread())
     threading.currentThread().exit_flag = False
 
-    work_queue = self.work_queue_factory(self.max_queue_size)
-
     progress_queue = self.progress_queue_factory(self.max_queue_size)
     request_manager = self.request_manager_factory(self.app_id,
                                                    self.host_port,
@@ -3639,27 +2979,23 @@
                                                    self.batch_size,
                                                    self.secure,
                                                    self.email,
-                                                   self.passin)
+                                                   self.passin,
+                                                   self.dry_run)
     try:
       request_manager.Authenticate()
     except Exception, e:
+      self.error = True
       if not isinstance(e, urllib2.HTTPError) or (
           e.code != 302 and e.code != 401):
         logger.exception('Exception during authentication')
       raise AuthenticationError()
     if (request_manager.auth_called and
         not request_manager.authenticated):
+      self.error = True
       raise AuthenticationError('Authentication failed')
 
-    for unused_idx in xrange(self.num_threads):
-      thread = self.workerthread_factory(work_queue,
-                                         self.throttle,
-                                         thread_gate,
-                                         request_manager,
-                                         self.num_threads,
-                                         self.batch_size)
+    for thread in thread_pool.Threads():
       self.throttle.Register(thread)
-      thread_gate.Register(thread)
 
     self.progress_thread = self.progresstrackerthread_factory(
         progress_queue, self.progress_db)
@@ -3671,7 +3007,8 @@
       progress_generator_factory = None
 
     self.data_source_thread = (
-        self.datasourcethread_factory(work_queue,
+        self.datasourcethread_factory(request_manager,
+                                      thread_pool,
                                       progress_queue,
                                       self.input_generator_factory,
                                       progress_generator_factory))
@@ -3682,60 +3019,54 @@
     def Interrupt(unused_signum, unused_frame):
       """Shutdown gracefully in response to a signal."""
       thread_local.shut_down = True
+      self.error = True
 
     signal.signal(signal.SIGINT, Interrupt)
 
     self.progress_thread.start()
     self.data_source_thread.start()
-    for thread in thread_gate.Threads():
-      thread.start()
 
 
     while not thread_local.shut_down:
       self.data_source_thread.join(timeout=0.25)
 
       if self.data_source_thread.isAlive():
-        for thread in list(thread_gate.Threads()) + [self.progress_thread]:
+        for thread in list(thread_pool.Threads()) + [self.progress_thread]:
           if not thread.isAlive():
             logger.info('Unexpected thread death: %s', thread.getName())
             thread_local.shut_down = True
+            self.error = True
             break
       else:
         break
 
-    if thread_local.shut_down:
-      ShutdownThreads(self.data_source_thread, work_queue, thread_gate)
-
     def _Join(ob, msg):
       logger.debug('Waiting for %s...', msg)
       if isinstance(ob, threading.Thread):
         ob.join(timeout=3.0)
         if ob.isAlive():
-          logger.debug('Joining %s failed', ob.GetFriendlyName())
+          logger.debug('Joining %s failed', ob)
         else:
           logger.debug('... done.')
       elif isinstance(ob, (Queue.Queue, ReQueue)):
-        if not InterruptibleQueueJoin(ob, thread_local, thread_gate):
-          ShutdownThreads(self.data_source_thread, work_queue, thread_gate)
+        if not InterruptibleQueueJoin(ob, thread_local, thread_pool):
+          ShutdownThreads(self.data_source_thread, thread_pool)
       else:
         ob.join()
         logger.debug('... done.')
 
-    _Join(work_queue, 'work_queue to flush')
-
-    for unused_thread in thread_gate.Threads():
-      work_queue.put(_THREAD_SHOULD_EXIT)
-
-    for unused_thread in thread_gate.Threads():
-      thread_gate.EnableThread()
-
-    for thread in thread_gate.Threads():
-      _Join(thread, 'thread [%s] to terminate' % thread.getName())
-
-      thread.CheckError()
+    if self.data_source_thread.error or thread_local.shut_down:
+      ShutdownThreads(self.data_source_thread, thread_pool)
+    else:
+      _Join(thread_pool.requeue, 'worker threads to finish')
+
+    thread_pool.Shutdown()
+    thread_pool.JoinThreads()
+    thread_pool.CheckErrors()
+    print ''
 
     if self.progress_thread.isAlive():
-      InterruptibleQueueJoin(progress_queue, thread_local, thread_gate,
+      InterruptibleQueueJoin(progress_queue, thread_local, thread_pool,
                              check_workers=False)
     else:
       logger.warn('Progress thread exited prematurely')
@@ -3763,9 +3094,10 @@
 
   def ReportStatus(self):
     """Display a message reporting the final status of the transfer."""
-    total_up, duration = self.throttle.TotalTransferred(BANDWIDTH_UP)
+    total_up, duration = self.throttle.TotalTransferred(
+        remote_api_throttle.BANDWIDTH_UP)
     s_total_up, unused_duration = self.throttle.TotalTransferred(
-        HTTPS_BANDWIDTH_UP)
+        remote_api_throttle.HTTPS_BANDWIDTH_UP)
     total_up += s_total_up
     total = total_up
     logger.info('%d entites total, %d previously transferred',
@@ -3793,18 +3125,49 @@
 
   def ReportStatus(self):
     """Display a message reporting the final status of the transfer."""
-    total_down, duration = self.throttle.TotalTransferred(BANDWIDTH_DOWN)
+    total_down, duration = self.throttle.TotalTransferred(
+        remote_api_throttle.BANDWIDTH_DOWN)
     s_total_down, unused_duration = self.throttle.TotalTransferred(
-        HTTPS_BANDWIDTH_DOWN)
+        remote_api_throttle.HTTPS_BANDWIDTH_DOWN)
     total_down += s_total_down
     total = total_down
     existing_count = self.progress_thread.existing_count
     xfer_count = self.progress_thread.EntitiesTransferred()
     logger.info('Have %d entities, %d previously transferred',
-                xfer_count + existing_count, existing_count)
+                xfer_count, existing_count)
     logger.info('%d entities (%d bytes) transferred in %.1f seconds',
                 xfer_count, total, duration)
-    return 0
+    if self.error:
+      return 1
+    else:
+      return 0
+
+
+class BulkMapperApp(BulkTransporterApp):
+  """Class to encapsulate bulk map functionality."""
+
+  def __init__(self, *args, **kwargs):
+    BulkTransporterApp.__init__(self, *args, **kwargs)
+
+  def ReportStatus(self):
+    """Display a message reporting the final status of the transfer."""
+    total_down, duration = self.throttle.TotalTransferred(
+        remote_api_throttle.BANDWIDTH_DOWN)
+    s_total_down, unused_duration = self.throttle.TotalTransferred(
+        remote_api_throttle.HTTPS_BANDWIDTH_DOWN)
+    total_down += s_total_down
+    total = total_down
+    xfer_count = self.progress_thread.EntitiesTransferred()
+    logger.info('The following may be inaccurate if any mapper tasks '
+                'encountered errors and had to be retried.')
+    logger.info('Applied mapper to %s entities.',
+                 xfer_count)
+    logger.info('%s entities (%s bytes) transferred in %.1f seconds',
+                 xfer_count, total, duration)
+    if self.error:
+      return 1
+    else:
+      return 0
 
 
 def PrintUsageExit(code):
@@ -3843,18 +3206,24 @@
              'loader_opts=',
              'exporter_opts=',
              'log_file=',
+             'mapper_opts=',
              'email=',
              'passin',
+             'map',
+             'dry_run',
+             'dump',
+             'restore',
              ]
 
 
-def ParseArguments(argv):
+def ParseArguments(argv, die_fn=lambda: PrintUsageExit(1)):
   """Parses command-line arguments.
 
   Prints out a help message if -h or --help is supplied.
 
   Args:
     argv: List of command-line arguments.
+    die_fn: Function to invoke to end the program.
 
   Returns:
     A dictionary containing the value of command-line options.
@@ -3867,11 +3236,11 @@
   arg_dict = {}
 
   arg_dict['url'] = REQUIRED_OPTION
-  arg_dict['filename'] = REQUIRED_OPTION
-  arg_dict['config_file'] = REQUIRED_OPTION
-  arg_dict['kind'] = REQUIRED_OPTION
-
-  arg_dict['batch_size'] = DEFAULT_BATCH_SIZE
+  arg_dict['filename'] = None
+  arg_dict['config_file'] = None
+  arg_dict['kind'] = None
+
+  arg_dict['batch_size'] = None
   arg_dict['num_threads'] = DEFAULT_THREAD_COUNT
   arg_dict['bandwidth_limit'] = DEFAULT_BANDWIDTH_LIMIT
   arg_dict['rps_limit'] = DEFAULT_RPS_LIMIT
@@ -3889,6 +3258,11 @@
   arg_dict['log_file'] = None
   arg_dict['email'] = None
   arg_dict['passin'] = False
+  arg_dict['mapper_opts'] = None
+  arg_dict['map'] = False
+  arg_dict['dry_run'] = False
+  arg_dict['dump'] = False
+  arg_dict['restore'] = False
 
   def ExpandFilename(filename):
     """Expand shell variables and ~usernames in filename."""
@@ -3938,26 +3312,39 @@
     elif option == '--exporter_opts':
       arg_dict['exporter_opts'] = value
     elif option == '--log_file':
-      arg_dict['log_file'] = value
+      arg_dict['log_file'] = ExpandFilename(value)
     elif option == '--email':
       arg_dict['email'] = value
     elif option == '--passin':
       arg_dict['passin'] = True
-
-  return ProcessArguments(arg_dict, die_fn=lambda: PrintUsageExit(1))
+    elif option == '--map':
+      arg_dict['map'] = True
+    elif option == '--mapper_opts':
+      arg_dict['mapper_opts'] = value
+    elif option == '--dry_run':
+      arg_dict['dry_run'] = True
+    elif option == '--dump':
+      arg_dict['dump'] = True
+    elif option == '--restore':
+      arg_dict['restore'] = True
+
+  return ProcessArguments(arg_dict, die_fn=die_fn)
 
 
 def ThrottleLayout(bandwidth_limit, http_limit, rps_limit):
   """Return a dictionary indicating the throttle options."""
-  return {
-      BANDWIDTH_UP: bandwidth_limit,
-      BANDWIDTH_DOWN: bandwidth_limit,
-      REQUESTS: http_limit,
-      HTTPS_BANDWIDTH_UP: bandwidth_limit / 5,
-      HTTPS_BANDWIDTH_DOWN: bandwidth_limit / 5,
-      HTTPS_REQUESTS: http_limit / 5,
-      RECORDS: rps_limit,
-  }
+  bulkloader_limits = dict(remote_api_throttle.NO_LIMITS)
+  bulkloader_limits.update({
+      remote_api_throttle.BANDWIDTH_UP: bandwidth_limit,
+      remote_api_throttle.BANDWIDTH_DOWN: bandwidth_limit,
+      remote_api_throttle.REQUESTS: http_limit,
+      remote_api_throttle.HTTPS_BANDWIDTH_UP: bandwidth_limit,
+      remote_api_throttle.HTTPS_BANDWIDTH_DOWN: bandwidth_limit,
+      remote_api_throttle.HTTPS_REQUESTS: http_limit,
+      remote_api_throttle.ENTITIES_FETCHED: rps_limit,
+      remote_api_throttle.ENTITIES_MODIFIED: rps_limit,
+  })
+  return bulkloader_limits
 
 
 def CheckOutputFile(filename):
@@ -3969,12 +3356,13 @@
   Raises:
     FileExistsError: if the given filename is not found
     FileNotWritableError: if the given filename is not readable.
-  """
-  if os.path.exists(filename):
+    """
+  full_path = os.path.abspath(filename)
+  if os.path.exists(full_path):
     raise FileExistsError('%s: output file exists' % filename)
-  elif not os.access(os.path.dirname(filename), os.W_OK):
+  elif not os.access(os.path.dirname(full_path), os.W_OK):
     raise FileNotWritableError(
-        '%s: not writable' % os.path.dirname(filename))
+        '%s: not writable' % os.path.dirname(full_path))
 
 
 def LoadConfig(config_file_name, exit_fn=sys.exit):
@@ -3999,6 +3387,11 @@
       if hasattr(bulkloader_config, 'exporters'):
         for cls in bulkloader_config.exporters:
           Exporter.RegisterExporter(cls())
+
+      if hasattr(bulkloader_config, 'mappers'):
+        for cls in bulkloader_config.mappers:
+          Mapper.RegisterMapper(cls())
+
     except NameError, e:
       m = re.search(r"[^']*'([^']*)'.*", str(e))
       if m.groups() and m.group(1) == 'Loader':
@@ -4058,9 +3451,12 @@
                    url=None,
                    kind=None,
                    db_filename=None,
+                   perform_map=None,
                    download=None,
                    has_header=None,
-                   result_db_filename=None):
+                   result_db_filename=None,
+                   dump=None,
+                   restore=None):
   """Returns a string that identifies the important options for the database."""
   if download:
     result_db_line = 'result_db: %s' % result_db_filename
@@ -4071,10 +3467,14 @@
   url: %s
   kind: %s
   download: %s
+  map: %s
+  dump: %s
+  restore: %s
   progress_db: %s
   has_header: %s
   %s
-  """ % (app_id, url, kind, download, db_filename, has_header, result_db_line)
+  """ % (app_id, url, kind, download, perform_map, dump, restore, db_filename,
+         has_header, result_db_line)
 
 
 def ProcessArguments(arg_dict,
@@ -4090,6 +3490,8 @@
   """
   app_id = GetArgument(arg_dict, 'app_id', die_fn)
   url = GetArgument(arg_dict, 'url', die_fn)
+  dump = GetArgument(arg_dict, 'dump', die_fn)
+  restore = GetArgument(arg_dict, 'restore', die_fn)
   filename = GetArgument(arg_dict, 'filename', die_fn)
   batch_size = GetArgument(arg_dict, 'batch_size', die_fn)
   kind = GetArgument(arg_dict, 'kind', die_fn)
@@ -4098,21 +3500,18 @@
   result_db_filename = GetArgument(arg_dict, 'result_db_filename', die_fn)
   download = GetArgument(arg_dict, 'download', die_fn)
   log_file = GetArgument(arg_dict, 'log_file', die_fn)
-
-  unused_passin = GetArgument(arg_dict, 'passin', die_fn)
-  unused_email = GetArgument(arg_dict, 'email', die_fn)
-  unused_debug = GetArgument(arg_dict, 'debug', die_fn)
-  unused_num_threads = GetArgument(arg_dict, 'num_threads', die_fn)
-  unused_bandwidth_limit = GetArgument(arg_dict, 'bandwidth_limit', die_fn)
-  unused_rps_limit = GetArgument(arg_dict, 'rps_limit', die_fn)
-  unused_http_limit = GetArgument(arg_dict, 'http_limit', die_fn)
-  unused_auth_domain = GetArgument(arg_dict, 'auth_domain', die_fn)
-  unused_has_headers = GetArgument(arg_dict, 'has_header', die_fn)
-  unused_loader_opts = GetArgument(arg_dict, 'loader_opts', die_fn)
-  unused_exporter_opts = GetArgument(arg_dict, 'exporter_opts', die_fn)
+  perform_map = GetArgument(arg_dict, 'map', die_fn)
 
   errors = []
 
+  if batch_size is None:
+    if download or perform_map:
+      arg_dict['batch_size'] = DEFAULT_DOWNLOAD_BATCH_SIZE
+    else:
+      arg_dict['batch_size'] = DEFAULT_BATCH_SIZE
+  elif batch_size <= 0:
+    errors.append('batch_size must be at least 1')
+
   if db_filename is None:
     arg_dict['db_filename'] = time.strftime(
         'bulkloader-progress-%Y%m%d.%H%M%S.sql3')
@@ -4124,37 +3523,35 @@
   if log_file is None:
     arg_dict['log_file'] = time.strftime('bulkloader-log-%Y%m%d.%H%M%S')
 
-  if batch_size <= 0:
-    errors.append('batch_size must be at least 1')
-
   required = '%s argument required'
 
+  if config_file is None and not dump and not restore:
+    errors.append('One of --config_file, --dump, or --restore is required')
+
   if url is REQUIRED_OPTION:
     errors.append(required % 'url')
 
-  if filename is REQUIRED_OPTION:
+  if not filename and not perform_map:
     errors.append(required % 'filename')
 
-  if kind is REQUIRED_OPTION:
-    errors.append(required % 'kind')
-
-  if config_file is REQUIRED_OPTION:
-    errors.append(required % 'config_file')
-
-  if download:
-    if result_db_filename is REQUIRED_OPTION:
-      errors.append(required % 'result_db_filename')
+  if kind is None:
+    if download or map:
+      errors.append('kind argument required for this operation')
+    elif not dump and not restore:
+      errors.append(
+          'kind argument required unless --dump or --restore is specified')
 
   if not app_id:
-    (unused_scheme, host_port, unused_url_path,
-     unused_query, unused_fragment) = urlparse.urlsplit(url)
-    suffix_idx = host_port.find('.appspot.com')
-    if suffix_idx > -1:
-      arg_dict['app_id'] = host_port[:suffix_idx]
-    elif host_port.split(':')[0].endswith('google.com'):
-      arg_dict['app_id'] = host_port.split('.')[0]
-    else:
-      errors.append('app_id argument required for non appspot.com domains')
+    if url and url is not REQUIRED_OPTION:
+      (unused_scheme, host_port, unused_url_path,
+       unused_query, unused_fragment) = urlparse.urlsplit(url)
+      suffix_idx = host_port.find('.appspot.com')
+      if suffix_idx > -1:
+        arg_dict['app_id'] = host_port[:suffix_idx]
+      elif host_port.split(':')[0].endswith('google.com'):
+        arg_dict['app_id'] = host_port.split('.')[0]
+      else:
+        errors.append('app_id argument required for non appspot.com domains')
 
   if errors:
     print >>sys.stderr, '\n'.join(errors)
@@ -4203,50 +3600,68 @@
   result_db_filename = arg_dict['result_db_filename']
   loader_opts = arg_dict['loader_opts']
   exporter_opts = arg_dict['exporter_opts']
+  mapper_opts = arg_dict['mapper_opts']
   email = arg_dict['email']
   passin = arg_dict['passin']
+  perform_map = arg_dict['map']
+  dump = arg_dict['dump']
+  restore = arg_dict['restore']
 
   os.environ['AUTH_DOMAIN'] = auth_domain
 
   kind = ParseKind(kind)
 
-  check_file(config_file)
-  if not download:
+  if not dump and not restore:
+    check_file(config_file)
+
+  if download and perform_map:
+    logger.error('--download and --map are mutually exclusive.')
+
+  if download or dump:
+    check_output_file(filename)
+  elif not perform_map:
     check_file(filename)
+
+  if dump:
+    Exporter.RegisterExporter(DumpExporter(kind, result_db_filename))
+  elif restore:
+    Loader.RegisterLoader(RestoreLoader(kind))
   else:
-    check_output_file(filename)
-
-  LoadConfig(config_file)
+    LoadConfig(config_file)
 
   os.environ['APPLICATION_ID'] = app_id
 
   throttle_layout = ThrottleLayout(bandwidth_limit, http_limit, rps_limit)
-
-  throttle = Throttle(layout=throttle_layout)
+  logger.info('Throttling transfers:')
+  logger.info('Bandwidth: %s bytes/second', bandwidth_limit)
+  logger.info('HTTP connections: %s/second', http_limit)
+  logger.info('Entities inserted/fetched/modified: %s/second', rps_limit)
+
+  throttle = remote_api_throttle.Throttle(layout=throttle_layout)
   signature = _MakeSignature(app_id=app_id,
                              url=url,
                              kind=kind,
                              db_filename=db_filename,
                              download=download,
+                             perform_map=perform_map,
                              has_header=has_header,
-                             result_db_filename=result_db_filename)
+                             result_db_filename=result_db_filename,
+                             dump=dump,
+                             restore=restore)
 
 
   max_queue_size = max(DEFAULT_QUEUE_SIZE, 3 * num_threads + 5)
 
   if db_filename == 'skip':
     progress_db = StubProgressDatabase()
-  elif not download:
+  elif not download and not perform_map and not dump:
     progress_db = ProgressDatabase(db_filename, signature)
   else:
     progress_db = ExportProgressDatabase(db_filename, signature)
 
-  if download:
-    result_db = ResultDatabase(result_db_filename, signature)
-
   return_code = 1
 
-  if not download:
+  if not download and not perform_map and not dump:
     loader = Loader.RegisteredLoader(kind)
     try:
       loader.initialize(filename, loader_opts)
@@ -4257,12 +3672,10 @@
                             workitem_generator_factory,
                             throttle,
                             progress_db,
-                            BulkLoaderThread,
                             ProgressTrackerThread,
                             max_queue_size,
                             RequestManager,
                             DataSourceThread,
-                            ReQueue,
                             Queue.Queue)
       try:
         return_code = app.Run()
@@ -4270,29 +3683,31 @@
         logger.info('Authentication Failed')
     finally:
       loader.finalize()
-  else:
+  elif not perform_map:
+    result_db = ResultDatabase(result_db_filename, signature)
     exporter = Exporter.RegisteredExporter(kind)
     try:
       exporter.initialize(filename, exporter_opts)
 
-      def KeyRangeGeneratorFactory(progress_queue, progress_gen):
-        return KeyRangeGenerator(kind, progress_queue, progress_gen)
+      def KeyRangeGeneratorFactory(request_manager, progress_queue,
+                                   progress_gen):
+        return KeyRangeItemGenerator(request_manager, kind, progress_queue,
+                                     progress_gen, DownloadItem)
 
       def ExportProgressThreadFactory(progress_queue, progress_db):
         return ExportProgressThread(kind,
                                     progress_queue,
                                     progress_db,
                                     result_db)
+
       app = BulkDownloaderApp(arg_dict,
                               KeyRangeGeneratorFactory,
                               throttle,
                               progress_db,
-                              BulkExporterThread,
                               ExportProgressThreadFactory,
                               0,
                               RequestManager,
                               DataSourceThread,
-                              ReQueue,
                               Queue.Queue)
       try:
         return_code = app.Run()
@@ -4300,6 +3715,35 @@
         logger.info('Authentication Failed')
     finally:
       exporter.finalize()
+  elif not download:
+    mapper = Mapper.RegisteredMapper(kind)
+    try:
+      mapper.initialize(mapper_opts)
+      def KeyRangeGeneratorFactory(request_manager, progress_queue,
+                                   progress_gen):
+        return KeyRangeItemGenerator(request_manager, kind, progress_queue,
+                                     progress_gen, MapperItem)
+
+      def MapperProgressThreadFactory(progress_queue, progress_db):
+        return MapperProgressThread(kind,
+                                    progress_queue,
+                                    progress_db)
+
+      app = BulkMapperApp(arg_dict,
+                          KeyRangeGeneratorFactory,
+                          throttle,
+                          progress_db,
+                          MapperProgressThreadFactory,
+                          0,
+                          RequestManager,
+                          DataSourceThread,
+                          Queue.Queue)
+      try:
+        return_code = app.Run()
+      except AuthenticationError:
+        logger.info('Authentication Failed')
+    finally:
+      mapper.finalize()
   return return_code
 
 
@@ -4335,8 +3779,17 @@
 
   logger.info('Logging to %s', log_file)
 
+  remote_api_throttle.logger.setLevel(level)
+  remote_api_throttle.logger.addHandler(file_handler)
+  remote_api_throttle.logger.addHandler(console)
+
   appengine_rpc.logger.setLevel(logging.WARN)
 
+  adaptive_thread_pool.logger.setLevel(logging.DEBUG)
+  adaptive_thread_pool.logger.addHandler(console)
+  adaptive_thread_pool.logger.addHandler(file_handler)
+  adaptive_thread_pool.logger.propagate = False
+
 
 def Run(arg_dict):
   """Sets up and runs the bulkloader, given the options as keyword arguments.
--- a/thirdparty/google_appengine/google/appengine/tools/dev_appserver.py	Mon Sep 07 20:26:39 2009 +0200
+++ b/thirdparty/google_appengine/google/appengine/tools/dev_appserver.py	Mon Sep 07 20:27:37 2009 +0200
@@ -49,6 +49,7 @@
 import dummy_thread
 import email.Utils
 import errno
+import heapq
 import httplib
 import imp
 import inspect
@@ -61,13 +62,13 @@
 import pickle
 import pprint
 import random
+import select
 
 import re
 import sre_compile
 import sre_constants
 import sre_parse
 
-import mimetypes
 import socket
 import sys
 import time
@@ -91,6 +92,7 @@
 from google.appengine.api.capabilities import capability_stub
 from google.appengine.api.labs.taskqueue import taskqueue_stub
 from google.appengine.api.memcache import memcache_stub
+from google.appengine.api.xmpp import xmpp_service_stub
 
 from google.appengine import dist
 
@@ -111,11 +113,13 @@
 FOOTER_TEMPLATE = 'logging_console_footer.html'
 
 DEFAULT_ENV = {
-  'GATEWAY_INTERFACE': 'CGI/1.1',
-  'AUTH_DOMAIN': 'gmail.com',
-  'TZ': 'UTC',
+    'GATEWAY_INTERFACE': 'CGI/1.1',
+    'AUTH_DOMAIN': 'gmail.com',
+    'TZ': 'UTC',
 }
 
+DEFAULT_SELECT_DELAY = 30.0
+
 for ext, mime_type in (('.asc', 'text/plain'),
                        ('.diff', 'text/plain'),
                        ('.csv', 'text/comma-separated-values'),
@@ -134,19 +138,24 @@
                                               'site-packages'))
 
 
+
 class Error(Exception):
   """Base-class for exceptions in this module."""
 
+
 class InvalidAppConfigError(Error):
   """The supplied application configuration file is invalid."""
 
+
 class AppConfigNotFoundError(Error):
   """Application configuration file not found."""
 
+
 class TemplatesNotLoadedError(Error):
   """Templates for the debugging console were not loaded."""
 
 
+
 def SplitURL(relative_url):
   """Splits a relative URL into its path and query-string components.
 
@@ -159,7 +168,8 @@
       script_name: Relative URL of the script that was accessed.
       query_string: String containing everything after the '?' character.
   """
-  scheme, netloc, path, query, fragment = urlparse.urlsplit(relative_url)
+  (unused_scheme, unused_netloc, path, query,
+   unused_fragment) = urlparse.urlsplit(relative_url)
   return path, query
 
 
@@ -182,6 +192,7 @@
   return 'http://%s%s' % (netloc, relative_url)
 
 
+
 class URLDispatcher(object):
   """Base-class for handling HTTP requests."""
 
@@ -231,6 +242,7 @@
     Args:
       dispatched_output: StringIO buffer containing the results from the
        dispatched
+      original_output: The original output file.
     """
     original_output.write(dispatched_output.read())
 
@@ -267,6 +279,10 @@
         URL; False if anyone can access this URL.
       admin_only: True if the user must be a logged-in administrator to
         access the URL; False if anyone can access the URL.
+
+    Raises:
+      TypeError: if dispatcher is not a URLDispatcher sub-class instance.
+      InvalidAppConfigError: if regex isn't valid.
     """
     if not isinstance(dispatcher, URLDispatcher):
       raise TypeError('dispatcher must be a URLDispatcher sub-class')
@@ -294,6 +310,7 @@
 
     Args:
       relative_url: Relative URL being accessed in a request.
+      split_url: Used for dependency injection.
 
     Returns:
       Tuple (dispatcher, matched_path, requires_login, admin_only), which are
@@ -302,7 +319,7 @@
       replaced using values matched by the URL pattern. If no match was found,
       dispatcher will be None.
     """
-    adjusted_url, query_string = split_url(relative_url)
+    adjusted_url, unused_query_string = split_url(relative_url)
 
     for url_tuple in self._url_patterns:
       url_re, dispatcher, path, requires_login, admin_only = url_tuple
@@ -325,6 +342,7 @@
     return set([url_tuple[1] for url_tuple in self._url_patterns])
 
 
+
 class MatcherDispatcher(URLDispatcher):
   """Dispatcher across multiple URLMatcher instances."""
 
@@ -338,7 +356,8 @@
     Args:
       login_url: Relative URL which should be used for handling user logins.
       url_matchers: Sequence of URLMatcher objects.
-      get_user_info, login_redirect: Used for dependency injection.
+      get_user_info: Used for dependency injection.
+      login_redirect: Used for dependency injection.
     """
     self._login_url = login_url
     self._url_matchers = tuple(url_matchers)
@@ -359,30 +378,30 @@
     path variable supplied to this method is ignored.
     """
     cookies = ', '.join(headers.getheaders('cookie'))
-    email, admin, user_id = self._get_user_info(cookies)
+    email_addr, admin, user_id = self._get_user_info(cookies)
 
     for matcher in self._url_matchers:
-      dispatcher, matched_path, requires_login, admin_only = matcher.Match(relative_url)
+      dispatcher, matched_path, requires_login, admin_only = matcher.Match(
+          relative_url)
       if dispatcher is None:
         continue
 
       logging.debug('Matched "%s" to %s with path %s',
                     relative_url, dispatcher, matched_path)
 
-      if (requires_login or admin_only) and not email:
+      if (requires_login or admin_only) and not email_addr:
         logging.debug('Login required, redirecting user')
-        self._login_redirect(
-          self._login_url,
-          base_env_dict['SERVER_NAME'],
-          base_env_dict['SERVER_PORT'],
-          relative_url,
-          outfile)
+        self._login_redirect(self._login_url,
+                             base_env_dict['SERVER_NAME'],
+                             base_env_dict['SERVER_PORT'],
+                             relative_url,
+                             outfile)
       elif admin_only and not admin:
         outfile.write('Status: %d Not authorized\r\n'
                       '\r\n'
                       'Current logged in user %s is not '
                       'authorized to view this page.'
-                      % (httplib.FORBIDDEN, email))
+                      % (httplib.FORBIDDEN, email_addr))
       else:
         forward = dispatcher.Dispatch(relative_url,
                                       matched_path,
@@ -393,7 +412,7 @@
 
         if forward:
           new_path, new_headers, new_input = forward
-          logging.info('Internal redirection to %s' % new_path)
+          logging.info('Internal redirection to %s', new_path)
           new_outfile = cStringIO.StringIO()
           self.Dispatch(new_path,
                         None,
@@ -413,6 +432,7 @@
                   % (httplib.NOT_FOUND, relative_url))
 
 
+
 class ApplicationLoggingHandler(logging.Handler):
   """Python Logging handler that displays the debugging console to users."""
 
@@ -487,7 +507,7 @@
       outfile: Output stream to which the console should be written if either
         a debug parameter was supplied or a logging cookie is present.
     """
-    script_name, query_string = SplitURL(relative_url)
+    unused_script_name, query_string = SplitURL(relative_url)
     param_dict = cgi.parse_qs(query_string, True)
     cookie_dict = Cookie.SimpleCookie(env.get('HTTP_COOKIE', ''))
     if 'debug' not in param_dict and self._COOKIE_NAME not in cookie_dict:
@@ -554,8 +574,8 @@
   env['CONTENT_LENGTH'] = headers.getheader('content-length', '')
 
   cookies = ', '.join(headers.getheaders('cookie'))
-  email, admin, user_id = get_user_info(cookies)
-  env['USER_EMAIL'] = email
+  email_addr, admin, user_id = get_user_info(cookies)
+  env['USER_EMAIL'] = email_addr
   env['USER_ID'] = user_id
   if admin:
     env['USER_IS_ADMIN'] = '1'
@@ -583,12 +603,11 @@
   """Fake for methods/functions that are not implemented in the production
   environment.
   """
-  raise NotImplementedError("This class/method is not available.")
+  raise NotImplementedError('This class/method is not available.')
 
 
 class NotImplementedFakeClass(object):
-  """Fake class for classes that are not implemented in the production
-  environment.
+  """Fake class for classes that are not implemented in the production env.
   """
   __init__ = NotImplementedFake
 
@@ -627,7 +646,7 @@
 def FakeURandom(n):
   """Fake version of os.urandom."""
   bytes = ''
-  for i in xrange(n):
+  for _ in range(n):
     bytes += chr(random.randint(0, 255))
   return bytes
 
@@ -665,9 +684,9 @@
   return original_setlocale(category, 'C')
 
 
-def FakeOpen(file, flags, mode=0777):
+def FakeOpen(filename, flags, mode=0777):
   """Fake version of os.open."""
-  raise OSError(errno.EPERM, "Operation not permitted", file)
+  raise OSError(errno.EPERM, "Operation not permitted", filename)
 
 
 def FakeRename(src, dst):
@@ -711,27 +730,27 @@
   return False
 
 SHARED_MODULE_PREFIXES = set([
-  'google',
-  'logging',
-  'sys',
-  'warnings',
-
-
-
-
-  're',
-  'sre_compile',
-  'sre_constants',
-  'sre_parse',
-
-
-
-
-  'wsgiref',
+    'google',
+    'logging',
+    'sys',
+    'warnings',
+
+
+
+
+    're',
+    'sre_compile',
+    'sre_constants',
+    'sre_parse',
+
+
+
+
+    'wsgiref',
 ])
 
 NOT_SHARED_MODULE_PREFIXES = set([
-  'google.appengine.ext',
+    'google.appengine.ext',
 ])
 
 
@@ -788,7 +807,7 @@
 
 
 def GeneratePythonPaths(*p):
-  """Generate all valid filenames for the given file
+  """Generate all valid filenames for the given file.
 
   Args:
     p: Positional args are the folders to the file and finally the file
@@ -814,8 +833,8 @@
                       if os.path.isfile(filename))
 
   ALLOWED_DIRS = set([
-    os.path.normcase(os.path.realpath(os.path.dirname(os.__file__))),
-    os.path.normcase(os.path.abspath(os.path.dirname(os.__file__))),
+      os.path.normcase(os.path.realpath(os.path.dirname(os.__file__))),
+      os.path.normcase(os.path.abspath(os.path.dirname(os.__file__))),
   ])
 
   NOT_ALLOWED_DIRS = set([
@@ -823,58 +842,58 @@
 
 
 
-    SITE_PACKAGES,
+      SITE_PACKAGES,
   ])
 
   ALLOWED_SITE_PACKAGE_DIRS = set(
-    os.path.normcase(os.path.abspath(os.path.join(SITE_PACKAGES, path)))
-    for path in [
-
-  ])
+      os.path.normcase(os.path.abspath(os.path.join(SITE_PACKAGES, path)))
+      for path in [
+
+          ])
 
   ALLOWED_SITE_PACKAGE_FILES = set(
-    os.path.normcase(os.path.abspath(os.path.join(
-      os.path.dirname(os.__file__), 'site-packages', path)))
-    for path in itertools.chain(*[
-
-      [os.path.join('Crypto')],
-      GeneratePythonPaths('Crypto', '__init__'),
-      [os.path.join('Crypto', 'Cipher')],
-      GeneratePythonPaths('Crypto', 'Cipher', '__init__'),
-      GeneratePythonPaths('Crypto', 'Cipher', 'AES'),
-      GeneratePythonPaths('Crypto', 'Cipher', 'ARC2'),
-      GeneratePythonPaths('Crypto', 'Cipher', 'ARC4'),
-      GeneratePythonPaths('Crypto', 'Cipher', 'Blowfish'),
-      GeneratePythonPaths('Crypto', 'Cipher', 'CAST'),
-      GeneratePythonPaths('Crypto', 'Cipher', 'DES'),
-      GeneratePythonPaths('Crypto', 'Cipher', 'DES3'),
-      GeneratePythonPaths('Crypto', 'Cipher', 'XOR'),
-      [os.path.join('Crypto', 'Hash')],
-      GeneratePythonPaths('Crypto', 'Hash', '__init__'),
-      GeneratePythonPaths('Crypto', 'Hash', 'HMAC'),
-      os.path.join('Crypto', 'Hash', 'MD2'),
-      os.path.join('Crypto', 'Hash', 'MD4'),
-      GeneratePythonPaths('Crypto', 'Hash', 'MD5'),
-      GeneratePythonPaths('Crypto', 'Hash', 'SHA'),
-      os.path.join('Crypto', 'Hash', 'SHA256'),
-      os.path.join('Crypto', 'Hash', 'RIPEMD'),
-      [os.path.join('Crypto', 'Protocol')],
-      GeneratePythonPaths('Crypto', 'Protocol', '__init__'),
-      GeneratePythonPaths('Crypto', 'Protocol', 'AllOrNothing'),
-      GeneratePythonPaths('Crypto', 'Protocol', 'Chaffing'),
-      [os.path.join('Crypto', 'PublicKey')],
-      GeneratePythonPaths('Crypto', 'PublicKey', '__init__'),
-      GeneratePythonPaths('Crypto', 'PublicKey', 'DSA'),
-      GeneratePythonPaths('Crypto', 'PublicKey', 'ElGamal'),
-      GeneratePythonPaths('Crypto', 'PublicKey', 'RSA'),
-      GeneratePythonPaths('Crypto', 'PublicKey', 'pubkey'),
-      GeneratePythonPaths('Crypto', 'PublicKey', 'qNEW'),
-      [os.path.join('Crypto', 'Util')],
-      GeneratePythonPaths('Crypto', 'Util', '__init__'),
-      GeneratePythonPaths('Crypto', 'Util', 'RFC1751'),
-      GeneratePythonPaths('Crypto', 'Util', 'number'),
-      GeneratePythonPaths('Crypto', 'Util', 'randpool'),
-  ]))
+      os.path.normcase(os.path.abspath(os.path.join(
+          os.path.dirname(os.__file__), 'site-packages', path)))
+      for path in itertools.chain(*[
+
+          [os.path.join('Crypto')],
+          GeneratePythonPaths('Crypto', '__init__'),
+          [os.path.join('Crypto', 'Cipher')],
+          GeneratePythonPaths('Crypto', 'Cipher', '__init__'),
+          GeneratePythonPaths('Crypto', 'Cipher', 'AES'),
+          GeneratePythonPaths('Crypto', 'Cipher', 'ARC2'),
+          GeneratePythonPaths('Crypto', 'Cipher', 'ARC4'),
+          GeneratePythonPaths('Crypto', 'Cipher', 'Blowfish'),
+          GeneratePythonPaths('Crypto', 'Cipher', 'CAST'),
+          GeneratePythonPaths('Crypto', 'Cipher', 'DES'),
+          GeneratePythonPaths('Crypto', 'Cipher', 'DES3'),
+          GeneratePythonPaths('Crypto', 'Cipher', 'XOR'),
+          [os.path.join('Crypto', 'Hash')],
+          GeneratePythonPaths('Crypto', 'Hash', '__init__'),
+          GeneratePythonPaths('Crypto', 'Hash', 'HMAC'),
+          os.path.join('Crypto', 'Hash', 'MD2'),
+          os.path.join('Crypto', 'Hash', 'MD4'),
+          GeneratePythonPaths('Crypto', 'Hash', 'MD5'),
+          GeneratePythonPaths('Crypto', 'Hash', 'SHA'),
+          os.path.join('Crypto', 'Hash', 'SHA256'),
+          os.path.join('Crypto', 'Hash', 'RIPEMD'),
+          [os.path.join('Crypto', 'Protocol')],
+          GeneratePythonPaths('Crypto', 'Protocol', '__init__'),
+          GeneratePythonPaths('Crypto', 'Protocol', 'AllOrNothing'),
+          GeneratePythonPaths('Crypto', 'Protocol', 'Chaffing'),
+          [os.path.join('Crypto', 'PublicKey')],
+          GeneratePythonPaths('Crypto', 'PublicKey', '__init__'),
+          GeneratePythonPaths('Crypto', 'PublicKey', 'DSA'),
+          GeneratePythonPaths('Crypto', 'PublicKey', 'ElGamal'),
+          GeneratePythonPaths('Crypto', 'PublicKey', 'RSA'),
+          GeneratePythonPaths('Crypto', 'PublicKey', 'pubkey'),
+          GeneratePythonPaths('Crypto', 'PublicKey', 'qNEW'),
+          [os.path.join('Crypto', 'Util')],
+          GeneratePythonPaths('Crypto', 'Util', '__init__'),
+          GeneratePythonPaths('Crypto', 'Util', 'RFC1751'),
+          GeneratePythonPaths('Crypto', 'Util', 'number'),
+          GeneratePythonPaths('Crypto', 'Util', 'randpool'),
+          ]))
 
   _original_file = file
 
@@ -912,7 +931,7 @@
 
   @staticmethod
   def SetAllowSkippedFiles(allow_skipped_files):
-    """Configures access to files matching FakeFile._skip_files
+    """Configures access to files matching FakeFile._skip_files.
 
     Args:
       allow_skipped_files: Boolean whether to allow access to skipped files
@@ -1106,6 +1125,7 @@
   return fullname.rsplit('.', 1)[-1]
 
 
+
 class CouldNotFindModuleError(ImportError):
   """Raised when a module could not be found.
 
@@ -1115,10 +1135,19 @@
 
 
 def Trace(func):
-  """Decorator that logs the call stack of the HardenedModulesHook class as
+  """Call stack logging decorator for HardenedModulesHook class.
+
+  This decorator logs the call stack of the HardenedModulesHook class as
   it executes, indenting logging messages based on the current stack depth.
+
+  Args:
+    func: the function to decorate.
+
+  Returns:
+    The decorated function.
   """
-  def decorate(self, *args, **kwargs):
+
+  def Decorate(self, *args, **kwargs):
     args_to_show = []
     if args is not None:
       args_to_show.extend(str(argument) for argument in args)
@@ -1136,7 +1165,7 @@
       self._indent_level -= 1
       self.log('Exiting %s(%s)', func.func_name, args_string)
 
-  return decorate
+  return Decorate
 
 
 class HardenedModulesHook(object):
@@ -1173,229 +1202,229 @@
       print >>sys.stderr, indent + (message % args)
 
   _WHITE_LIST_C_MODULES = [
-    'AES',
-    'ARC2',
-    'ARC4',
-    'Blowfish',
-    'CAST',
-    'DES',
-    'DES3',
-    'MD2',
-    'MD4',
-    'RIPEMD',
-    'SHA256',
-    'XOR',
-
-    '_Crypto_Cipher__AES',
-    '_Crypto_Cipher__ARC2',
-    '_Crypto_Cipher__ARC4',
-    '_Crypto_Cipher__Blowfish',
-    '_Crypto_Cipher__CAST',
-    '_Crypto_Cipher__DES',
-    '_Crypto_Cipher__DES3',
-    '_Crypto_Cipher__XOR',
-    '_Crypto_Hash__MD2',
-    '_Crypto_Hash__MD4',
-    '_Crypto_Hash__RIPEMD',
-    '_Crypto_Hash__SHA256',
-    'array',
-    'binascii',
-    'bz2',
-    'cmath',
-    'collections',
-    'crypt',
-    'cStringIO',
-    'datetime',
-    'errno',
-    'exceptions',
-    'gc',
-    'itertools',
-    'math',
-    'md5',
-    'operator',
-    'posix',
-    'posixpath',
-    'pyexpat',
-    'sha',
-    'struct',
-    'sys',
-    'time',
-    'timing',
-    'unicodedata',
-    'zlib',
-    '_ast',
-    '_bisect',
-    '_codecs',
-    '_codecs_cn',
-    '_codecs_hk',
-    '_codecs_iso2022',
-    '_codecs_jp',
-    '_codecs_kr',
-    '_codecs_tw',
-    '_collections',
-    '_csv',
-    '_elementtree',
-    '_functools',
-    '_hashlib',
-    '_heapq',
-    '_locale',
-    '_lsprof',
-    '_md5',
-    '_multibytecodec',
-    '_random',
-    '_sha',
-    '_sha256',
-    '_sha512',
-    '_sre',
-    '_struct',
-    '_types',
-    '_weakref',
-    '__main__',
+      'AES',
+      'ARC2',
+      'ARC4',
+      'Blowfish',
+      'CAST',
+      'DES',
+      'DES3',
+      'MD2',
+      'MD4',
+      'RIPEMD',
+      'SHA256',
+      'XOR',
+
+      '_Crypto_Cipher__AES',
+      '_Crypto_Cipher__ARC2',
+      '_Crypto_Cipher__ARC4',
+      '_Crypto_Cipher__Blowfish',
+      '_Crypto_Cipher__CAST',
+      '_Crypto_Cipher__DES',
+      '_Crypto_Cipher__DES3',
+      '_Crypto_Cipher__XOR',
+      '_Crypto_Hash__MD2',
+      '_Crypto_Hash__MD4',
+      '_Crypto_Hash__RIPEMD',
+      '_Crypto_Hash__SHA256',
+      'array',
+      'binascii',
+      'bz2',
+      'cmath',
+      'collections',
+      'crypt',
+      'cStringIO',
+      'datetime',
+      'errno',
+      'exceptions',
+      'gc',
+      'itertools',
+      'math',
+      'md5',
+      'operator',
+      'posix',
+      'posixpath',
+      'pyexpat',
+      'sha',
+      'struct',
+      'sys',
+      'time',
+      'timing',
+      'unicodedata',
+      'zlib',
+      '_ast',
+      '_bisect',
+      '_codecs',
+      '_codecs_cn',
+      '_codecs_hk',
+      '_codecs_iso2022',
+      '_codecs_jp',
+      '_codecs_kr',
+      '_codecs_tw',
+      '_collections',
+      '_csv',
+      '_elementtree',
+      '_functools',
+      '_hashlib',
+      '_heapq',
+      '_locale',
+      '_lsprof',
+      '_md5',
+      '_multibytecodec',
+      '_random',
+      '_sha',
+      '_sha256',
+      '_sha512',
+      '_sre',
+      '_struct',
+      '_types',
+      '_weakref',
+      '__main__',
   ]
 
   __CRYPTO_CIPHER_ALLOWED_MODULES = [
-    'MODE_CBC',
-    'MODE_CFB',
-    'MODE_CTR',
-    'MODE_ECB',
-    'MODE_OFB',
-    'block_size',
-    'key_size',
-    'new',
+      'MODE_CBC',
+      'MODE_CFB',
+      'MODE_CTR',
+      'MODE_ECB',
+      'MODE_OFB',
+      'block_size',
+      'key_size',
+      'new',
   ]
   _WHITE_LIST_PARTIAL_MODULES = {
-    'Crypto.Cipher.AES': __CRYPTO_CIPHER_ALLOWED_MODULES,
-    'Crypto.Cipher.ARC2': __CRYPTO_CIPHER_ALLOWED_MODULES,
-    'Crypto.Cipher.Blowfish': __CRYPTO_CIPHER_ALLOWED_MODULES,
-    'Crypto.Cipher.CAST': __CRYPTO_CIPHER_ALLOWED_MODULES,
-    'Crypto.Cipher.DES': __CRYPTO_CIPHER_ALLOWED_MODULES,
-    'Crypto.Cipher.DES3': __CRYPTO_CIPHER_ALLOWED_MODULES,
-
-    'gc': [
-      'enable',
-      'disable',
-      'isenabled',
-      'collect',
-      'get_debug',
-      'set_threshold',
-      'get_threshold',
-      'get_count'
-    ],
-
-
-
-    'os': [
-      'access',
-      'altsep',
-      'curdir',
-      'defpath',
-      'devnull',
-      'environ',
-      'error',
-      'extsep',
-      'EX_NOHOST',
-      'EX_NOINPUT',
-      'EX_NOPERM',
-      'EX_NOUSER',
-      'EX_OK',
-      'EX_OSERR',
-      'EX_OSFILE',
-      'EX_PROTOCOL',
-      'EX_SOFTWARE',
-      'EX_TEMPFAIL',
-      'EX_UNAVAILABLE',
-      'EX_USAGE',
-      'F_OK',
-      'getcwd',
-      'getcwdu',
-      'getenv',
-      'listdir',
-      'lstat',
-      'name',
-      'NGROUPS_MAX',
-      'O_APPEND',
-      'O_CREAT',
-      'O_DIRECT',
-      'O_DIRECTORY',
-      'O_DSYNC',
-      'O_EXCL',
-      'O_LARGEFILE',
-      'O_NDELAY',
-      'O_NOCTTY',
-      'O_NOFOLLOW',
-      'O_NONBLOCK',
-      'O_RDONLY',
-      'O_RDWR',
-      'O_RSYNC',
-      'O_SYNC',
-      'O_TRUNC',
-      'O_WRONLY',
-      'open',
-      'pardir',
-      'path',
-      'pathsep',
-      'R_OK',
-      'readlink',
-      'remove',
-      'rename',
-      'SEEK_CUR',
-      'SEEK_END',
-      'SEEK_SET',
-      'sep',
-      'stat',
-      'stat_float_times',
-      'stat_result',
-      'strerror',
-      'TMP_MAX',
-      'unlink',
-      'urandom',
-      'utime',
-      'walk',
-      'WCOREDUMP',
-      'WEXITSTATUS',
-      'WIFEXITED',
-      'WIFSIGNALED',
-      'WIFSTOPPED',
-      'WNOHANG',
-      'WSTOPSIG',
-      'WTERMSIG',
-      'WUNTRACED',
-      'W_OK',
-      'X_OK',
-    ],
+      'Crypto.Cipher.AES': __CRYPTO_CIPHER_ALLOWED_MODULES,
+      'Crypto.Cipher.ARC2': __CRYPTO_CIPHER_ALLOWED_MODULES,
+      'Crypto.Cipher.Blowfish': __CRYPTO_CIPHER_ALLOWED_MODULES,
+      'Crypto.Cipher.CAST': __CRYPTO_CIPHER_ALLOWED_MODULES,
+      'Crypto.Cipher.DES': __CRYPTO_CIPHER_ALLOWED_MODULES,
+      'Crypto.Cipher.DES3': __CRYPTO_CIPHER_ALLOWED_MODULES,
+
+      'gc': [
+          'enable',
+          'disable',
+          'isenabled',
+          'collect',
+          'get_debug',
+          'set_threshold',
+          'get_threshold',
+          'get_count'
+      ],
+
+
+
+      'os': [
+          'access',
+          'altsep',
+          'curdir',
+          'defpath',
+          'devnull',
+          'environ',
+          'error',
+          'extsep',
+          'EX_NOHOST',
+          'EX_NOINPUT',
+          'EX_NOPERM',
+          'EX_NOUSER',
+          'EX_OK',
+          'EX_OSERR',
+          'EX_OSFILE',
+          'EX_PROTOCOL',
+          'EX_SOFTWARE',
+          'EX_TEMPFAIL',
+          'EX_UNAVAILABLE',
+          'EX_USAGE',
+          'F_OK',
+          'getcwd',
+          'getcwdu',
+          'getenv',
+          'listdir',
+          'lstat',
+          'name',
+          'NGROUPS_MAX',
+          'O_APPEND',
+          'O_CREAT',
+          'O_DIRECT',
+          'O_DIRECTORY',
+          'O_DSYNC',
+          'O_EXCL',
+          'O_LARGEFILE',
+          'O_NDELAY',
+          'O_NOCTTY',
+          'O_NOFOLLOW',
+          'O_NONBLOCK',
+          'O_RDONLY',
+          'O_RDWR',
+          'O_RSYNC',
+          'O_SYNC',
+          'O_TRUNC',
+          'O_WRONLY',
+          'open',
+          'pardir',
+          'path',
+          'pathsep',
+          'R_OK',
+          'readlink',
+          'remove',
+          'rename',
+          'SEEK_CUR',
+          'SEEK_END',
+          'SEEK_SET',
+          'sep',
+          'stat',
+          'stat_float_times',
+          'stat_result',
+          'strerror',
+          'TMP_MAX',
+          'unlink',
+          'urandom',
+          'utime',
+          'walk',
+          'WCOREDUMP',
+          'WEXITSTATUS',
+          'WIFEXITED',
+          'WIFSIGNALED',
+          'WIFSTOPPED',
+          'WNOHANG',
+          'WSTOPSIG',
+          'WTERMSIG',
+          'WUNTRACED',
+          'W_OK',
+          'X_OK',
+      ],
   }
 
   _MODULE_OVERRIDES = {
-    'locale': {
-      'setlocale': FakeSetLocale,
-    },
-
-    'os': {
-      'access': FakeAccess,
-      'listdir': RestrictedPathFunction(os.listdir),
-
-      'lstat': RestrictedPathFunction(os.stat),
-      'open': FakeOpen,
-      'readlink': FakeReadlink,
-      'remove': FakeUnlink,
-      'rename': FakeRename,
-      'stat': RestrictedPathFunction(os.stat),
-      'uname': FakeUname,
-      'unlink': FakeUnlink,
-      'urandom': FakeURandom,
-      'utime': FakeUTime,
-    },
-
-    'distutils.util': {
-      'get_platform': FakeGetPlatform,
-    },
+      'locale': {
+          'setlocale': FakeSetLocale,
+      },
+
+      'os': {
+          'access': FakeAccess,
+          'listdir': RestrictedPathFunction(os.listdir),
+
+          'lstat': RestrictedPathFunction(os.stat),
+          'open': FakeOpen,
+          'readlink': FakeReadlink,
+          'remove': FakeUnlink,
+          'rename': FakeRename,
+          'stat': RestrictedPathFunction(os.stat),
+          'uname': FakeUname,
+          'unlink': FakeUnlink,
+          'urandom': FakeURandom,
+          'utime': FakeUTime,
+      },
+
+      'distutils.util': {
+          'get_platform': FakeGetPlatform,
+      },
   }
 
   _ENABLED_FILE_TYPES = (
-    imp.PKG_DIRECTORY,
-    imp.PY_SOURCE,
-    imp.PY_COMPILED,
-    imp.C_BUILTIN,
+      imp.PKG_DIRECTORY,
+      imp.PY_SOURCE,
+      imp.PY_COMPILED,
+      imp.C_BUILTIN,
   )
 
   def __init__(self,
@@ -1822,6 +1851,7 @@
     return compile(source_code, full_path, 'exec')
 
 
+
 def ModuleHasValidMainFunction(module):
   """Determines if a module has a main function that takes no arguments.
 
@@ -1835,7 +1865,8 @@
     True if the module has a valid, reusable main function; False otherwise.
   """
   if hasattr(module, 'main') and type(module.main) is types.FunctionType:
-    arg_names, var_args, var_kwargs, default_values = inspect.getargspec(module.main)
+    arg_names, var_args, var_kwargs, default_values = inspect.getargspec(
+        module.main)
     if len(arg_names) == 0:
       return True
     if default_values is not None and len(arg_names) == len(default_values):
@@ -1878,6 +1909,7 @@
     cgi_path: Absolute path of the CGI module file on disk.
     module_fullname: Fully qualified Python module name used to import the
       cgi_path module.
+    isfile: Used for testing.
 
   Returns:
     List containing the paths to the missing __init__.py files.
@@ -1935,7 +1967,7 @@
   module_fullname = GetScriptModuleName(handler_path)
   script_module = module_dict.get(module_fullname)
   module_code = None
-  if script_module != None and ModuleHasValidMainFunction(script_module):
+  if script_module is not None and ModuleHasValidMainFunction(script_module):
     logging.debug('Reusing main() function of module "%s"', module_fullname)
   else:
     if script_module is None:
@@ -1944,7 +1976,8 @@
 
     try:
       module_code = import_hook.get_code(module_fullname)
-      full_path, search_path, submodule = import_hook.GetModuleInfo(module_fullname)
+      full_path, search_path, submodule = (
+        import_hook.GetModuleInfo(module_fullname))
       script_module.__file__ = full_path
       if search_path is not None:
         script_module.__path__ = search_path
@@ -1955,7 +1988,7 @@
         import_error_message += ': ' + str(exc_value)
 
       logging.exception('Encountered error loading module "%s": %s',
-                    module_fullname, import_error_message)
+                        module_fullname, import_error_message)
       missing_inits = FindMissingInitFiles(cgi_path, module_fullname)
       if missing_inits:
         logging.warning('Missing package initialization files: %s',
@@ -1989,8 +2022,10 @@
 
 
 def ExecuteOrImportScript(handler_path, cgi_path, import_hook):
-  """Executes a CGI script by importing it as a new module; possibly reuses
-  the module's main() function if it is defined and takes no arguments.
+  """Executes a CGI script by importing it as a new module.
+
+  This possibly reuses the module's main() function if it is defined and
+  takes no arguments.
 
   Basic technique lifted from PEP 338 and Python2.5's runpy module. See:
     http://www.python.org/dev/peps/pep-0338/
@@ -2260,6 +2295,7 @@
     return 'Local CGI dispatcher for %s' % self._cgi_func
 
 
+
 class PathAdjuster(object):
   """Adjusts application file paths to paths relative to the application or
   external library directories."""
@@ -2273,8 +2309,10 @@
     self._root_path = os.path.abspath(root_path)
 
   def AdjustPath(self, path):
-    """Adjusts application file path to paths relative to the application or
-    external library directories.
+    """Adjusts application file paths to relative to the application.
+
+    More precisely this method adjusts application file path to paths
+    relative to the application or external library directories.
 
     Handler paths that start with $PYTHON_LIB will be converted to paths
     relative to the google directory.
@@ -2294,6 +2332,7 @@
     return path
 
 
+
 class StaticFileConfigMatcher(object):
   """Keeps track of file/directory specific application configuration.
 
@@ -2382,13 +2421,13 @@
       String containing the mime type to use. Will be 'application/octet-stream'
       if we have no idea what it should be.
     """
-    for (path_re, mime_type, expiration) in self._patterns:
-      if mime_type is not None:
+    for (path_re, mimetype, unused_expiration) in self._patterns:
+      if mimetype is not None:
         the_match = path_re.match(path)
         if the_match:
-          return mime_type
-
-    filename, extension = os.path.splitext(path)
+          return mimetype
+
+    unused_filename, extension = os.path.splitext(path)
     return mimetypes.types_map.get(extension, 'application/octet-stream')
 
   def GetExpiration(self, path):
@@ -2400,7 +2439,7 @@
     Returns:
       Integer number of seconds to be used for browser cache expiration time.
     """
-    for (path_re, mime_type, expiration) in self._patterns:
+    for (path_re, unused_mimetype, expiration) in self._patterns:
       the_match = path_re.match(path)
       if the_match:
         return expiration
@@ -2409,6 +2448,7 @@
 
 
 
+
 def ReadDataFile(data_path, openfile=file):
   """Reads a file on disk, returning a corresponding HTTP status and data.
 
@@ -2548,6 +2588,8 @@
   """Update the cache header."""
   if not 'Cache-Control' in headers:
     headers['Cache-Control'] = 'no-cache'
+    if not 'Expires' in headers:
+      headers['Expires'] = 'Fri, 01 Jan 1990 00:00:00 GMT'
   return status_code, status_message, headers, body
 
 
@@ -2609,7 +2651,7 @@
           ParseStatusRewriter,
           CacheRewriter,
           ContentLengthRewriter,
-  ]
+         ]
 
 
 def RewriteResponse(response_file, response_rewriters=None):
@@ -2663,6 +2705,7 @@
   return status_code, status_message, header_data, response_file.read()
 
 
+
 class ModuleManager(object):
   """Manages loaded modules in the runtime.
 
@@ -2695,7 +2738,7 @@
       Path of the module's corresponding Python source file if it exists, or
       just the module's compiled Python file. If the module has an invalid
       __file__ attribute, None will be returned.
-      """
+    """
     module_file = getattr(module, '__file__', None)
     if module_file is None:
       return None
@@ -2727,8 +2770,7 @@
     return False
 
   def UpdateModuleFileModificationTimes(self):
-    """Records the current modification times of all monitored modules.
-    """
+    """Records the current modification times of all monitored modules."""
     self._modification_times.clear()
     for name, module in self._modules.items():
       if not isinstance(module, types.ModuleType):
@@ -2750,23 +2792,29 @@
     sys.path_hooks[:] = self._save_path_hooks
 
 
+
 def _ClearTemplateCache(module_dict=sys.modules):
   """Clear template cache in webapp.template module.
 
   Attempts to load template module.  Ignores failure.  If module loads, the
   template cache is cleared.
+
+  Args:
+    module_dict: Used for dependency injection.
   """
   template_module = module_dict.get('google.appengine.ext.webapp.template')
   if template_module is not None:
     template_module.template_cache.clear()
 
 
+
 def CreateRequestHandler(root_path,
                          login_url,
                          require_indexes=False,
                          static_caching=True):
-  """Creates a new BaseHTTPRequestHandler sub-class for use with the Python
-  BaseHTTPServer module's HTTP server.
+  """Creates a new BaseHTTPRequestHandler sub-class.
+
+  This class will be used with the Python BaseHTTPServer module's HTTP server.
 
   Python's built-in HTTP server does not support passing context information
   along to instances of its request handlers. This function gets around that
@@ -2792,9 +2840,11 @@
   application_config_cache = AppConfigCache()
 
   class DevAppServerRequestHandler(BaseHTTPServer.BaseHTTPRequestHandler):
-    """Dispatches URLs using patterns from a URLMatcher, which is created by
-    loading an application's configuration file. Executes CGI scripts in the
-    local process so the scripts can use mock versions of APIs.
+    """Dispatches URLs using patterns from a URLMatcher.
+
+    The URLMatcher is created by loading an application's configuration file.
+    Executes CGI scripts in the local process so the scripts can use mock
+    versions of APIs.
 
     HTTP requests that correctly specify a user info cookie
     (dev_appserver_login.COOKIE_NAME) will have the 'USER_EMAIL' environment
@@ -2819,13 +2869,13 @@
       """Initializer.
 
       Args:
-        args, kwargs: Positional and keyword arguments passed to the constructor
-          of the super class.
+        args: Positional arguments passed to the superclass constructor.
+        kwargs: Keyword arguments passed to the superclass constructor.
       """
       BaseHTTPServer.BaseHTTPRequestHandler.__init__(self, *args, **kwargs)
 
     def version_string(self):
-      """Returns server's version string used for Server HTTP header"""
+      """Returns server's version string used for Server HTTP header."""
       return self.server_version
 
     def do_GET(self):
@@ -2862,12 +2912,12 @@
       server_name = server_name.split(':', 1)[0]
 
       env_dict = {
-        'REQUEST_METHOD': self.command,
-        'REMOTE_ADDR': self.client_address[0],
-        'SERVER_SOFTWARE': self.server_version,
-        'SERVER_NAME': server_name,
-        'SERVER_PROTOCOL': self.protocol_version,
-        'SERVER_PORT': str(self.server.server_port),
+          'REQUEST_METHOD': self.command,
+          'REMOTE_ADDR': self.client_address[0],
+          'SERVER_SOFTWARE': self.server_version,
+          'SERVER_NAME': server_name,
+          'SERVER_PROTOCOL': self.protocol_version,
+          'SERVER_PORT': str(self.server.server_port),
       }
 
       full_url = GetFullURL(server_name, self.server.server_port, self.path)
@@ -2889,8 +2939,9 @@
                                                  cache=self.config_cache,
                                                  static_caching=static_caching)
         if config.api_version != API_VERSION:
-          logging.error("API versions cannot be switched dynamically: %r != %r"
-                        % (config.api_version, API_VERSION))
+          logging.error(
+              "API versions cannot be switched dynamically: %r != %r",
+              config.api_version, API_VERSION)
           sys.exit(1)
         env_dict['CURRENT_VERSION_ID'] = config.version + ".1"
         env_dict['APPLICATION_ID'] = config.application
@@ -2927,7 +2978,8 @@
         outfile.flush()
         outfile.seek(0)
 
-        status_code, status_message, header_data, body = RewriteResponse(outfile, self.rewriter_chain)
+        status_code, status_message, header_data, body = (
+            RewriteResponse(outfile, self.rewriter_chain))
 
         runtime_response_size = len(outfile.getvalue())
         if runtime_response_size > MAX_RUNTIME_RESPONSE_SIZE:
@@ -2984,6 +3036,7 @@
   return DevAppServerRequestHandler
 
 
+
 def ReadAppConfig(appinfo_path, parse_app_config=appinfo.LoadSingleAppInfo):
   """Reads app.yaml file and returns its app id and list of URLMap instances.
 
@@ -3001,9 +3054,9 @@
   """
   try:
     appinfo_file = file(appinfo_path, 'r')
-  except IOError, e:
+  except IOError, unused_e:
     raise InvalidAppConfigError(
-      'Application configuration could not be read from "%s"' % appinfo_path)
+        'Application configuration could not be read from "%s"' % appinfo_path)
   try:
     return parse_app_config(appinfo_file)
   finally:
@@ -3035,11 +3088,17 @@
     default_expiration: String describing default expiration time for browser
       based caching of static files.  If set to None this disallows any
       browser caching of static content.
-    create_url_matcher, create_cgi_dispatcher, create_file_dispatcher,
+    create_url_matcher: Used for dependency injection.
+    create_cgi_dispatcher: Used for dependency injection.
+    create_file_dispatcher: Used for dependency injection.
     create_path_adjuster: Used for dependency injection.
+    normpath: Used for dependency injection.
 
   Returns:
     Instance of URLMatcher with the supplied URLMap objects properly loaded.
+
+  Raises:
+    InvalidAppConfigError: if the handler in url_map_list is an unknown type.
   """
   url_matcher = create_url_matcher()
   path_adjuster = create_path_adjuster(root_path)
@@ -3121,10 +3180,14 @@
       sys.modules dictionary.
     cache: Instance of AppConfigCache or None.
     static_caching: True if browser caching of static files should be allowed.
-    read_app_config, create_matcher: Used for dependency injection.
+    read_app_config: Used for dependency injection.
+    create_matcher: Used for dependency injection.
 
   Returns:
      tuple: (AppInfoExternal, URLMatcher)
+
+  Raises:
+    AppConfigNotFound: if an app.yaml file cannot be found.
   """
   for appinfo_path in [os.path.join(root_path, 'app.yaml'),
                        os.path.join(root_path, 'app.yml')]:
@@ -3181,23 +3244,26 @@
   Raises:
     If the config file is unreadable, empty or invalid, this function will
     raise an InvalidAppConfigError or a MalformedCronConfiguration exception.
-    """
+  """
   try:
     croninfo_file = file(croninfo_path, 'r')
   except IOError, e:
     raise InvalidAppConfigError(
-        'Cron configuration could not be read from "%s"' % croninfo_path)
+        'Cron configuration could not be read from "%s": %s'
+        % (croninfo_path, e))
   try:
     return parse_cron_config(croninfo_file)
   finally:
     croninfo_file.close()
 
 
+
 def SetupStubs(app_id, **config):
   """Sets up testing stubs of APIs.
 
   Args:
     app_id: Application ID being served.
+    config: keyword arguments.
 
   Keywords:
     root_path: Root path to the directory of the application which should
@@ -3256,47 +3322,53 @@
                                 dev_appserver_login.LOGOUT_PARAM)
 
   apiproxy_stub_map.apiproxy.RegisterStub(
-    'user',
-    user_service_stub.UserServiceStub(login_url=fixed_login_url,
-                                      logout_url=fixed_logout_url))
+      'user',
+      user_service_stub.UserServiceStub(login_url=fixed_login_url,
+                                        logout_url=fixed_logout_url))
 
   apiproxy_stub_map.apiproxy.RegisterStub(
-    'urlfetch',
-    urlfetch_stub.URLFetchServiceStub())
+      'urlfetch',
+      urlfetch_stub.URLFetchServiceStub())
 
   apiproxy_stub_map.apiproxy.RegisterStub(
-    'mail',
-    mail_stub.MailServiceStub(smtp_host,
-                              smtp_port,
-                              smtp_user,
-                              smtp_password,
-                              enable_sendmail=enable_sendmail,
-                              show_mail_body=show_mail_body))
+      'mail',
+      mail_stub.MailServiceStub(smtp_host,
+                                smtp_port,
+                                smtp_user,
+                                smtp_password,
+                                enable_sendmail=enable_sendmail,
+                                show_mail_body=show_mail_body))
 
   apiproxy_stub_map.apiproxy.RegisterStub(
-    'memcache',
-    memcache_stub.MemcacheServiceStub())
+      'memcache',
+      memcache_stub.MemcacheServiceStub())
+
+  apiproxy_stub_map.apiproxy.RegisterStub(
+      'capability_service',
+      capability_stub.CapabilityServiceStub())
 
   apiproxy_stub_map.apiproxy.RegisterStub(
-    'capability_service',
-    capability_stub.CapabilityServiceStub())
+      'taskqueue',
+      taskqueue_stub.TaskQueueServiceStub(root_path=root_path))
 
   apiproxy_stub_map.apiproxy.RegisterStub(
-    'taskqueue',
-    taskqueue_stub.TaskQueueServiceStub(root_path=root_path))
+      'xmpp',
+      xmpp_service_stub.XmppServiceStub())
+
 
 
   try:
     from google.appengine.api.images import images_stub
     apiproxy_stub_map.apiproxy.RegisterStub(
-      'images',
-      images_stub.ImagesServiceStub())
+        'images',
+        images_stub.ImagesServiceStub())
   except ImportError, e:
     logging.warning('Could not initialize images API; you are likely missing '
                     'the Python "PIL" module. ImportError: %s', e)
     from google.appengine.api.images import images_not_implemented_stub
-    apiproxy_stub_map.apiproxy.RegisterStub('images',
-      images_not_implemented_stub.ImagesNotImplementedServiceStub())
+    apiproxy_stub_map.apiproxy.RegisterStub(
+        'images',
+        images_not_implemented_stub.ImagesNotImplementedServiceStub())
 
 
 def CreateImplicitMatcher(module_dict,
@@ -3314,7 +3386,9 @@
     module_dict: Dictionary in the form used by sys.modules.
     root_path: Path to the root of the application.
     login_url: Relative URL which should be used for handling user login/logout.
+    create_path_adjuster: Used for dependedency injection.
     create_local_dispatcher: Used for dependency injection.
+    create_cgi_dispatcher: Used for dependedency injection.
 
   Returns:
     Instance of URLMatcher with appropriate dispatchers.
@@ -3393,6 +3467,7 @@
       are stored.
     serve_address: Address on which the server should serve.
     require_indexes: True if index.yaml is read-only gospel; default False.
+    allow_skipped_files: True if skipped files should be accessible.
     static_caching: True if browser caching of static files should be allowed.
     python_path_list: Used for dependency injection.
     sdk_dir: Directory where the SDK is stored.
@@ -3415,5 +3490,53 @@
 
   if absolute_root_path not in python_path_list:
     python_path_list.insert(0, absolute_root_path)
-
-  return BaseHTTPServer.HTTPServer((serve_address, port), handler_class)
+  return HTTPServerWithScheduler((serve_address, port), handler_class)
+
+
+class HTTPServerWithScheduler(BaseHTTPServer.HTTPServer):
+  """A BaseHTTPServer subclass that calls a method at a regular interval."""
+
+  def __init__(self, server_address, request_handler_class):
+    """Constructor.
+
+    Args:
+      server_address: the bind address of the server.
+      request_handler_class: class used to handle requests.
+    """
+    BaseHTTPServer.HTTPServer.__init__(self, server_address,
+                                       request_handler_class)
+    self._events = []
+
+  def get_request(self, time_func=time.time, select_func=select.select):
+    """Overrides the base get_request call.
+
+    Args:
+      time_func: used for testing.
+      select_func: used for testing.
+
+    Returns:
+      a (socket_object, address info) tuple.
+    """
+    while True:
+      if self._events:
+        current_time = time_func()
+        next_eta = self._events[0][0]
+        delay = next_eta - current_time
+      else:
+        delay = DEFAULT_SELECT_DELAY
+      readable, _, _ = select_func([self.socket], [], [], max(delay, 0))
+      if readable:
+        return self.socket.accept()
+      current_time = time_func()
+      if self._events and current_time >= self._events[0][0]:
+        unused_eta, runnable = heapq.heappop(self._events)
+        runnable()
+
+  def AddEvent(self, eta, runnable):
+    """Add a runnable event to be run at the specified time.
+
+    Args:
+      eta: when to run the event, in seconds since epoch.
+      runnable: a callable object.
+    """
+    heapq.heappush(self._events, (eta, runnable))
--- a/thirdparty/google_appengine/google/appengine/tools/dev_appserver_main.py	Mon Sep 07 20:26:39 2009 +0200
+++ b/thirdparty/google_appengine/google/appengine/tools/dev_appserver_main.py	Mon Sep 07 20:27:37 2009 +0200
@@ -68,10 +68,15 @@
 import logging
 import os
 import re
+import signal
 import sys
 import traceback
 import tempfile
 
+logging.basicConfig(
+    level=logging.INFO,
+    format='%(levelname)-8s %(asctime)s %(filename)s:%(lineno)s] %(message)s')
+
 
 def SetGlobals():
   """Set various global variables involving the 'google' package.
@@ -379,6 +384,14 @@
   return server
 
 
+def SigTermHandler(signum, frame):
+  """Handler for TERM signal.
+
+  Raises a KeyboardInterrupt to perform a graceful shutdown on SIGTERM signal.
+  """
+  raise KeyboardInterrupt()
+
+
 def main(argv):
   """Runs the development application server."""
   args, option_dict = ParseArguments(argv)
@@ -419,9 +432,7 @@
 
   option_dict['root_path'] = os.path.realpath(root_path)
 
-  logging.basicConfig(
-    level=log_level,
-    format='%(levelname)-8s %(asctime)s %(filename)s:%(lineno)s] %(message)s')
+  logging.getLogger().setLevel(log_level)
 
   config = None
   try:
@@ -461,6 +472,8 @@
       allow_skipped_files=allow_skipped_files,
       static_caching=static_caching)
 
+  signal.signal(signal.SIGTERM, SigTermHandler)
+
   logging.info('Running application %s on port %d: http://%s:%d',
                config.application, port, serve_address, port)
   try:
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/thirdparty/google_appengine/google/appengine/tools/remote_api_shell.py	Mon Sep 07 20:27:37 2009 +0200
@@ -0,0 +1,94 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""An interactive python shell that uses remote_api.
+
+Usage:
+  remote_api_shell.py [-s HOSTNAME] APPID [PATH]
+"""
+
+
+import atexit
+import code
+import getpass
+import optparse
+import os
+import sys
+
+try:
+  import readline
+except ImportError:
+  readline = None
+
+from google.appengine.ext.remote_api import remote_api_stub
+
+from google.appengine.api import datastore
+from google.appengine.api import memcache
+from google.appengine.api import urlfetch
+from google.appengine.api import users
+from google.appengine.ext import db
+from google.appengine.ext import search
+
+
+HISTORY_PATH = os.path.expanduser('~/.remote_api_shell_history')
+DEFAULT_PATH = '/remote_api'
+BANNER = """App Engine remote_api shell
+Python %s
+The db, users, urlfetch, and memcache modules are imported.""" % sys.version
+
+
+def auth_func():
+  return (raw_input('Email: '), getpass.getpass('Password: '))
+
+
+def main(argv):
+  parser = optparse.OptionParser()
+  parser.add_option('-s', '--server', dest='server',
+                    help='The hostname your app is deployed on. '
+                         'Defaults to <app_id>.appspot.com.')
+  (options, args) = parser.parse_args()
+
+  if not args or len(args) > 2:
+    print >> sys.stderr, __doc__
+    if len(args) > 2:
+      print >> sys.stderr, 'Unexpected arguments: %s' % args[2:]
+    sys.exit(1)
+
+  appid = args[0]
+  if len(args) == 2:
+    path = args[1]
+  else:
+    path = DEFAULT_PATH
+
+  remote_api_stub.ConfigureRemoteApi(appid, path, auth_func,
+                                     servername=options.server)
+  remote_api_stub.MaybeInvokeAuthentication()
+
+  os.environ['SERVER_SOFTWARE'] = 'Development (remote_api_shell)/1.0'
+
+  sys.ps1 = '%s> ' % appid
+  if readline is not None:
+    readline.parse_and_bind('tab: complete')
+    atexit.register(lambda: readline.write_history_file(HISTORY_PATH))
+    if os.path.exists(HISTORY_PATH):
+      readline.read_history_file(HISTORY_PATH)
+
+  code.interact(banner=BANNER, local=globals())
+
+
+if __name__ == '__main__':
+  main(sys.argv)
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/thirdparty/google_appengine/google/appengine/tools/requeue.py	Mon Sep 07 20:27:37 2009 +0200
@@ -0,0 +1,219 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""A thread-safe queue in which removed objects put back to the front."""
+
+
+import logging
+import Queue
+import threading
+import time
+
+logger = logging.getLogger('google.appengine.tools.requeue')
+
+
+class ReQueue(object):
+  """A special thread-safe queue.
+
+  A ReQueue allows unfinished work items to be returned with a call to
+  reput().  When an item is reput, task_done() should *not* be called
+  in addition, getting an item that has been reput does not increase
+  the number of outstanding tasks.
+
+  This class shares an interface with Queue.Queue and provides the
+  additional reput method.
+  """
+
+  def __init__(self,
+               queue_capacity,
+               requeue_capacity=None,
+               queue_factory=Queue.Queue,
+               get_time=time.time):
+    """Initialize a ReQueue instance.
+
+    Args:
+      queue_capacity: The number of items that can be put in the ReQueue.
+      requeue_capacity: The numer of items that can be reput in the ReQueue.
+      queue_factory: Used for dependency injection.
+      get_time: Used for dependency injection.
+    """
+    if requeue_capacity is None:
+      requeue_capacity = queue_capacity
+
+    self.get_time = get_time
+    self.queue = queue_factory(queue_capacity)
+    self.requeue = queue_factory(requeue_capacity)
+    self.lock = threading.Lock()
+    self.put_cond = threading.Condition(self.lock)
+    self.get_cond = threading.Condition(self.lock)
+
+  def _DoWithTimeout(self,
+                     action,
+                     exc,
+                     wait_cond,
+                     done_cond,
+                     lock,
+                     timeout=None,
+                     block=True):
+    """Performs the given action with a timeout.
+
+    The action must be non-blocking, and raise an instance of exc on a
+    recoverable failure.  If the action fails with an instance of exc,
+    we wait on wait_cond before trying again.  Failure after the
+    timeout is reached is propagated as an exception.  Success is
+    signalled by notifying on done_cond and returning the result of
+    the action.  If action raises any exception besides an instance of
+    exc, it is immediately propagated.
+
+    Args:
+      action: A callable that performs a non-blocking action.
+      exc: An exception type that is thrown by the action to indicate
+        a recoverable error.
+      wait_cond: A condition variable which should be waited on when
+        action throws exc.
+      done_cond: A condition variable to signal if the action returns.
+      lock: The lock used by wait_cond and done_cond.
+      timeout: A non-negative float indicating the maximum time to wait.
+      block: Whether to block if the action cannot complete immediately.
+
+    Returns:
+      The result of the action, if it is successful.
+
+    Raises:
+      ValueError: If the timeout argument is negative.
+    """
+    if timeout is not None and timeout < 0.0:
+      raise ValueError('\'timeout\' must not be a negative  number')
+    if not block:
+      timeout = 0.0
+    result = None
+    success = False
+    start_time = self.get_time()
+    lock.acquire()
+    try:
+      while not success:
+        try:
+          result = action()
+          success = True
+        except Exception, e:
+          if not isinstance(e, exc):
+            raise e
+          if timeout is not None:
+            elapsed_time = self.get_time() - start_time
+            timeout -= elapsed_time
+            if timeout <= 0.0:
+              raise e
+          wait_cond.wait(timeout)
+    finally:
+      if success:
+        done_cond.notify()
+      lock.release()
+    return result
+
+  def put(self, item, block=True, timeout=None):
+    """Put an item into the requeue.
+
+    Args:
+      item: An item to add to the requeue.
+      block: Whether to block if the requeue is full.
+      timeout: Maximum on how long to wait until the queue is non-full.
+
+    Raises:
+      Queue.Full if the queue is full and the timeout expires.
+    """
+    def PutAction():
+      self.queue.put(item, block=False)
+    self._DoWithTimeout(PutAction,
+                        Queue.Full,
+                        self.get_cond,
+                        self.put_cond,
+                        self.lock,
+                        timeout=timeout,
+                        block=block)
+
+  def reput(self, item, block=True, timeout=None):
+    """Re-put an item back into the requeue.
+
+    Re-putting an item does not increase the number of outstanding
+    tasks, so the reput item should be uniquely associated with an
+    item that was previously removed from the requeue and for which
+    TaskDone has not been called.
+
+    Args:
+      item: An item to add to the requeue.
+      block: Whether to block if the requeue is full.
+      timeout: Maximum on how long to wait until the queue is non-full.
+
+    Raises:
+      Queue.Full is the queue is full and the timeout expires.
+    """
+    def ReputAction():
+      self.requeue.put(item, block=False)
+    self._DoWithTimeout(ReputAction,
+                        Queue.Full,
+                        self.get_cond,
+                        self.put_cond,
+                        self.lock,
+                        timeout=timeout,
+                        block=block)
+
+  def get(self, block=True, timeout=None):
+    """Get an item from the requeue.
+
+    Args:
+      block: Whether to block if the requeue is empty.
+      timeout: Maximum on how long to wait until the requeue is non-empty.
+
+    Returns:
+      An item from the requeue.
+
+    Raises:
+      Queue.Empty if the queue is empty and the timeout expires.
+    """
+    def GetAction():
+      try:
+        result = self.requeue.get(block=False)
+        self.requeue.task_done()
+      except Queue.Empty:
+        result = self.queue.get(block=False)
+      return result
+    return self._DoWithTimeout(GetAction,
+                               Queue.Empty,
+                               self.put_cond,
+                               self.get_cond,
+                               self.lock,
+                               timeout=timeout,
+                               block=block)
+
+  def join(self):
+    """Blocks until all of the items in the requeue have been processed."""
+    self.queue.join()
+
+  def task_done(self):
+    """Indicate that a previously enqueued item has been fully processed."""
+    self.queue.task_done()
+
+  def empty(self):
+    """Returns true if the requeue is empty."""
+    return self.queue.empty() and self.requeue.empty()
+
+  def get_nowait(self):
+    """Try to get an item from the queue without blocking."""
+    return self.get(block=False)
+
+  def qsize(self):
+    return self.queue.qsize() + self.requeue.qsize()
--- a/thirdparty/google_appengine/google/net/proto/ProtocolBuffer.py	Mon Sep 07 20:26:39 2009 +0200
+++ b/thirdparty/google_appengine/google/net/proto/ProtocolBuffer.py	Mon Sep 07 20:27:37 2009 +0200
@@ -344,13 +344,13 @@
 
   def putFloat(self, v):
     a = array.array('B')
-    a.fromstring(struct.pack("f", v))
+    a.fromstring(struct.pack("<f", v))
     self.buf.extend(a)
     return
 
   def putDouble(self, v):
     a = array.array('B')
-    a.fromstring(struct.pack("d", v))
+    a.fromstring(struct.pack("<d", v))
     self.buf.extend(a)
     return
 
@@ -362,6 +362,7 @@
     return
 
   def putPrefixedString(self, v):
+    v = str(v)
     self.putVarInt32(len(v))
     self.buf.fromstring(v)
     return
@@ -499,13 +500,13 @@
     if self.idx + 4 > self.limit: raise ProtocolBufferDecodeError, "truncated"
     a = self.buf[self.idx:self.idx+4]
     self.idx += 4
-    return struct.unpack("f", a)[0]
+    return struct.unpack("<f", a)[0]
 
   def getDouble(self):
     if self.idx + 8 > self.limit: raise ProtocolBufferDecodeError, "truncated"
     a = self.buf[self.idx:self.idx+8]
     self.idx += 8
-    return struct.unpack("d", a)[0]
+    return struct.unpack("<d", a)[0]
 
   def getBoolean(self):
     b = self.get8()
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/thirdparty/google_appengine/google/net/proto/message_set.py	Mon Sep 07 20:27:37 2009 +0200
@@ -0,0 +1,291 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""This module contains the MessageSet class, which is a special kind of
+protocol message which can contain other protocol messages without knowing
+their types.  See the class's doc string for more information."""
+
+
+from google.net.proto import ProtocolBuffer
+import logging
+
+TAG_BEGIN_ITEM_GROUP = 11
+TAG_END_ITEM_GROUP   = 12
+TAG_TYPE_ID          = 16
+TAG_MESSAGE          = 26
+
+class Item:
+
+  def __init__(self, message, message_class=None):
+    self.message = message
+    self.message_class = message_class
+
+  def SetToDefaultInstance(self, message_class):
+    self.message = message_class()
+    self.message_class = message_class
+
+  def Parse(self, message_class):
+
+    if self.message_class is not None:
+      return 1
+
+    try:
+      self.message = message_class(self.message)
+      self.message_class = message_class
+      return 1
+    except ProtocolBuffer.ProtocolBufferDecodeError:
+      logging.warn("Parse error in message inside MessageSet.  Tried "
+                   "to parse as: " + message_class.__name__)
+      return 0
+
+  def MergeFrom(self, other):
+
+    if self.message_class is not None:
+      if other.Parse(self.message_class):
+        self.message.MergeFrom(other.message)
+
+    elif other.message_class is not None:
+      if not self.Parse(other.message_class):
+        self.message = other.message_class()
+        self.message_class = other.message_class
+      self.message.MergeFrom(other.message)
+
+    else:
+      self.message += other.message
+
+  def Copy(self):
+
+    if self.message_class is None:
+      return Item(self.message)
+    else:
+      new_message = self.message_class()
+      new_message.CopyFrom(self.message)
+      return Item(new_message, self.message_class)
+
+  def Equals(self, other):
+
+    if self.message_class is not None:
+      if not other.Parse(self.message_class): return 0
+      return self.message.Equals(other.message)
+
+    elif other.message_class is not None:
+      if not self.Parse(other.message_class): return 0
+      return self.message.Equals(other.message)
+
+    else:
+      return self.message == other.message
+
+  def IsInitialized(self, debug_strs=None):
+
+    if self.message_class is None:
+      return 1
+    else:
+      return self.message.IsInitialized(debug_strs)
+
+  def ByteSize(self, pb, type_id):
+
+    message_length = 0
+    if self.message_class is None:
+      message_length = len(self.message)
+    else:
+      message_length = self.message.ByteSize()
+
+    return pb.lengthString(message_length) + pb.lengthVarInt64(type_id) + 2
+
+  def OutputUnchecked(self, out, type_id):
+
+    out.putVarInt32(TAG_TYPE_ID)
+    out.putVarUint64(type_id)
+    out.putVarInt32(TAG_MESSAGE)
+    if self.message_class is None:
+      out.putPrefixedString(self.message)
+    else:
+      out.putVarInt32(self.message.ByteSize())
+      self.message.OutputUnchecked(out)
+
+  def Decode(decoder):
+
+    type_id = 0
+    message = None
+    while 1:
+      tag = decoder.getVarInt32()
+      if tag == TAG_END_ITEM_GROUP:
+        break
+      if tag == TAG_TYPE_ID:
+        type_id = decoder.getVarUint64()
+        continue
+      if tag == TAG_MESSAGE:
+        message = decoder.getPrefixedString()
+        continue
+      if tag == 0: raise ProtocolBuffer.ProtocolBufferDecodeError
+      decoder.skipData(tag)
+
+    if type_id == 0 or message is None:
+      raise ProtocolBuffer.ProtocolBufferDecodeError
+    return (type_id, message)
+  Decode = staticmethod(Decode)
+
+
+class MessageSet(ProtocolBuffer.ProtocolMessage):
+
+  def __init__(self, contents=None):
+    self.items = dict()
+    if contents is not None: self.MergeFromString(contents)
+
+
+  def get(self, message_class):
+
+    if message_class.MESSAGE_TYPE_ID not in self.items:
+      return message_class()
+    item = self.items[message_class.MESSAGE_TYPE_ID]
+    if item.Parse(message_class):
+      return item.message
+    else:
+      return message_class()
+
+  def mutable(self, message_class):
+
+    if message_class.MESSAGE_TYPE_ID not in self.items:
+      message = message_class()
+      self.items[message_class.MESSAGE_TYPE_ID] = Item(message, message_class)
+      return message
+    item = self.items[message_class.MESSAGE_TYPE_ID]
+    if not item.Parse(message_class):
+      item.SetToDefaultInstance(message_class)
+    return item.message
+
+  def has(self, message_class):
+
+    if message_class.MESSAGE_TYPE_ID not in self.items:
+      return 0
+    item = self.items[message_class.MESSAGE_TYPE_ID]
+    return item.Parse(message_class)
+
+  def has_unparsed(self, message_class):
+    return message_class.MESSAGE_TYPE_ID in self.items
+
+  def GetTypeIds(self):
+    return self.items.keys()
+
+  def NumMessages(self):
+    return len(self.items)
+
+  def remove(self, message_class):
+    if message_class.MESSAGE_TYPE_ID in self.items:
+      del self.items[message_class.MESSAGE_TYPE_ID]
+
+
+  def __getitem__(self, message_class):
+    if message_class.MESSAGE_TYPE_ID not in self.items:
+      raise KeyError(message_class)
+    item = self.items[message_class.MESSAGE_TYPE_ID]
+    if item.Parse(message_class):
+      return item.message
+    else:
+      raise KeyError(message_class)
+
+  def __setitem__(self, message_class, message):
+    self.items[message_class.MESSAGE_TYPE_ID] = Item(message, message_class)
+
+  def __contains__(self, message_class):
+    return self.has(message_class)
+
+  def __delitem__(self, message_class):
+    self.remove(message_class)
+
+  def __len__(self):
+    return len(self.items)
+
+
+  def MergeFrom(self, other):
+
+    assert other is not self
+
+    for (type_id, item) in other.items.items():
+      if type_id in self.items:
+        self.items[type_id].MergeFrom(item)
+      else:
+        self.items[type_id] = item.Copy()
+
+  def Equals(self, other):
+    if other is self: return 1
+    if len(self.items) != len(other.items): return 0
+
+    for (type_id, item) in other.items.items():
+      if type_id not in self.items: return 0
+      if not self.items[type_id].Equals(item): return 0
+
+    return 1
+
+  def __eq__(self, other):
+    return ((other is not None)
+        and (other.__class__ == self.__class__)
+        and self.Equals(other))
+
+  def __ne__(self, other):
+    return not (self == other)
+
+  def IsInitialized(self, debug_strs=None):
+
+    initialized = 1
+    for item in self.items.values():
+      if not item.IsInitialized(debug_strs):
+        initialized = 0
+    return initialized
+
+  def ByteSize(self):
+    n = 2 * len(self.items)
+    for (type_id, item) in self.items.items():
+      n += item.ByteSize(self, type_id)
+    return n
+
+  def Clear(self):
+    self.items = dict()
+
+  def OutputUnchecked(self, out):
+    for (type_id, item) in self.items.items():
+      out.putVarInt32(TAG_BEGIN_ITEM_GROUP)
+      item.OutputUnchecked(out, type_id)
+      out.putVarInt32(TAG_END_ITEM_GROUP)
+
+  def TryMerge(self, decoder):
+    while decoder.avail() > 0:
+      tag = decoder.getVarInt32()
+      if tag == TAG_BEGIN_ITEM_GROUP:
+        (type_id, message) = Item.Decode(decoder)
+        if type_id in self.items:
+          self.items[type_id].MergeFrom(Item(message))
+        else:
+          self.items[type_id] = Item(message)
+        continue
+      if (tag == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+      decoder.skipData(tag)
+
+  def __str__(self, prefix="", printElemNumber=0):
+    text = ""
+    for (type_id, item) in self.items.items():
+      if item.message_class is None:
+        text += "%s[%d] <\n" % (prefix, type_id)
+        text += "%s  (%d bytes)\n" % (prefix, len(item.message))
+        text += "%s>\n" % prefix
+      else:
+        text += "%s[%s] <\n" % (prefix, item.message_class.__name__)
+        text += item.message.__str__(prefix + "  ", printElemNumber)
+        text += "%s>\n" % prefix
+    return text
+
+__all__ = ['MessageSet']
--- a/thirdparty/google_appengine/lib/django/MANIFEST.in	Mon Sep 07 20:26:39 2009 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,10 +0,0 @@
-include AUTHORS
-include INSTALL
-include LICENSE
-recursive-include docs *
-recursive-include scripts *
-recursive-include django/conf/locale *
-recursive-include django/contrib/admin/templates *
-recursive-include django/contrib/admin/media *
-recursive-include django/contrib/comments/templates *
-recursive-include django/contrib/sitemaps/templates *
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/thirdparty/google_appengine/lib/django/PKG-INFO	Mon Sep 07 20:27:37 2009 +0200
@@ -0,0 +1,11 @@
+Metadata-Version: 1.0
+Name: Django
+Version: 0.96.4
+Summary: A high-level Python Web framework that encourages rapid development and clean, pragmatic design.
+Home-page: http://www.djangoproject.com/
+Author: Django Software Foundation
+Author-email: foundation@djangoproject.com
+License: UNKNOWN
+Download-URL: http://media.djangoproject.com/releases/0.96/Django-0.96.4.tar.gz
+Description: UNKNOWN
+Platform: UNKNOWN
--- a/thirdparty/google_appengine/lib/django/django/__init__.py	Mon Sep 07 20:26:39 2009 +0200
+++ b/thirdparty/google_appengine/lib/django/django/__init__.py	Mon Sep 07 20:27:37 2009 +0200
@@ -1,1 +1,1 @@
-VERSION = (0, 96.1, None)
+VERSION = (0, 96.4, None)
--- a/thirdparty/google_appengine/lib/django/django/conf/global_settings.py	Mon Sep 07 20:26:39 2009 +0200
+++ b/thirdparty/google_appengine/lib/django/django/conf/global_settings.py	Mon Sep 07 20:27:37 2009 +0200
@@ -237,7 +237,7 @@
 
 # The User-Agent string to use when checking for URL validity through the
 # isExistingURL validator.
-URL_VALIDATOR_USER_AGENT = "Django/0.96.1 (http://www.djangoproject.com)"
+URL_VALIDATOR_USER_AGENT = "Django/0.96.2 (http://www.djangoproject.com)"
 
 ##############
 # MIDDLEWARE #
--- a/thirdparty/google_appengine/lib/django/django/contrib/admin/templates/admin/login.html	Mon Sep 07 20:26:39 2009 +0200
+++ b/thirdparty/google_appengine/lib/django/django/contrib/admin/templates/admin/login.html	Mon Sep 07 20:27:37 2009 +0200
@@ -19,7 +19,6 @@
   <div class="form-row">
     <label for="id_password">{% trans 'Password:' %}</label> <input type="password" name="password" id="id_password" />
     <input type="hidden" name="this_is_the_login_form" value="1" />
-    <input type="hidden" name="post_data" value="{{ post_data }}" /> {#<span class="help">{% trans 'Have you <a href="/password_reset/">forgotten your password</a>?' %}</span>#}
   </div>
   <div class="submit-row">
     <label>&nbsp;</label><input type="submit" value="{% trans 'Log in' %}" />
--- a/thirdparty/google_appengine/lib/django/django/contrib/admin/views/decorators.py	Mon Sep 07 20:26:39 2009 +0200
+++ b/thirdparty/google_appengine/lib/django/django/contrib/admin/views/decorators.py	Mon Sep 07 20:27:37 2009 +0200
@@ -3,43 +3,21 @@
 from django.contrib.auth.models import User
 from django.contrib.auth import authenticate, login
 from django.shortcuts import render_to_response
+from django.utils.html import escape
 from django.utils.translation import gettext_lazy
-import base64, datetime, md5
-import cPickle as pickle
+import base64, datetime
 
 ERROR_MESSAGE = gettext_lazy("Please enter a correct username and password. Note that both fields are case-sensitive.")
 LOGIN_FORM_KEY = 'this_is_the_login_form'
 
 def _display_login_form(request, error_message=''):
     request.session.set_test_cookie()
-    if request.POST and request.POST.has_key('post_data'):
-        # User has failed login BUT has previously saved post data.
-        post_data = request.POST['post_data']
-    elif request.POST:
-        # User's session must have expired; save their post data.
-        post_data = _encode_post_data(request.POST)
-    else:
-        post_data = _encode_post_data({})
     return render_to_response('admin/login.html', {
         'title': _('Log in'),
-        'app_path': request.path,
-        'post_data': post_data,
+        'app_path': escape(request.path),
         'error_message': error_message
     }, context_instance=template.RequestContext(request))
 
-def _encode_post_data(post_data):
-    pickled = pickle.dumps(post_data)
-    pickled_md5 = md5.new(pickled + settings.SECRET_KEY).hexdigest()
-    return base64.encodestring(pickled + pickled_md5)
-
-def _decode_post_data(encoded_data):
-    encoded_data = base64.decodestring(encoded_data)
-    pickled, tamper_check = encoded_data[:-32], encoded_data[-32:]
-    if md5.new(pickled + settings.SECRET_KEY).hexdigest() != tamper_check:
-        from django.core.exceptions import SuspiciousOperation
-        raise SuspiciousOperation, "User may have tampered with session cookie."
-    return pickle.loads(pickled)
-
 def staff_member_required(view_func):
     """
     Decorator for views that checks that the user is logged in and is a staff
@@ -48,10 +26,6 @@
     def _checklogin(request, *args, **kwargs):
         if request.user.is_authenticated() and request.user.is_staff:
             # The user is valid. Continue to the admin page.
-            if request.POST.has_key('post_data'):
-                # User must have re-authenticated through a different window
-                # or tab.
-                request.POST = _decode_post_data(request.POST['post_data'])
             return view_func(request, *args, **kwargs)
 
         assert hasattr(request, 'session'), "The Django admin requires session middleware to be installed. Edit your MIDDLEWARE_CLASSES setting to insert 'django.contrib.sessions.middleware.SessionMiddleware'."
@@ -59,7 +33,7 @@
         # If this isn't already the login page, display it.
         if not request.POST.has_key(LOGIN_FORM_KEY):
             if request.POST:
-                message = _("Please log in again, because your session has expired. Don't worry: Your submission has been saved.")
+                message = _("Please log in again, because your session has expired.")
             else:
                 message = ""
             return _display_login_form(request, message)
@@ -92,16 +66,7 @@
                 # TODO: set last_login with an event.
                 user.last_login = datetime.datetime.now()
                 user.save()
-                if request.POST.has_key('post_data'):
-                    post_data = _decode_post_data(request.POST['post_data'])
-                    if post_data and not post_data.has_key(LOGIN_FORM_KEY):
-                        # overwrite request.POST with the saved post_data, and continue
-                        request.POST = post_data
-                        request.user = user
-                        return view_func(request, *args, **kwargs)
-                    else:
-                        request.session.delete_test_cookie()
-                        return http.HttpResponseRedirect(request.path)
+                return http.HttpResponseRedirect(request.path)
             else:
                 return _display_login_form(request, ERROR_MESSAGE)
 
--- a/thirdparty/google_appengine/lib/django/django/contrib/flatpages/README.TXT	Mon Sep 07 20:26:39 2009 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,8 +0,0 @@
-This is an optional add-on app, flatpages.
-
-For full documentation, see either of these:
-
-    * The file docs/flatpages.txt in the Django distribution
-    * http://www.djangoproject.com/documentation/flatpages/ on the Web
-
-Both have identical content.
--- a/thirdparty/google_appengine/lib/django/django/contrib/formtools/templates/formtools/form.html	Mon Sep 07 20:26:39 2009 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,15 +0,0 @@
-{% extends "base.html" %}
-
-{% block content %}
-
-{% if form.errors %}<h1>Please correct the following errors</h1>{% else %}<h1>Submit</h1>{% endif %}
-
-<form action="" method="post">
-<table>
-{{ form }}
-</table>
-<input type="hidden" name="{{ stage_field }}" value="1" />
-<p><input type="submit" value="Preview" /></p>
-</form>
-
-{% endblock %}
--- a/thirdparty/google_appengine/lib/django/django/contrib/formtools/templates/formtools/preview.html	Mon Sep 07 20:26:39 2009 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,36 +0,0 @@
-{% extends "base.html" %}
-
-{% block content %}
-
-<h1>Preview your submission</h1>
-
-<table>
-{% for field in form %}
-<tr>
-<th>{{ field.label }}:</th>
-<td>{{ field.data|escape }}</td>
-</tr>
-{% endfor %}
-</table>
-
-<p>Security hash: {{ hash_value }}</p>
-
-<form action="" method="post">
-{% for field in form %}{{ field.as_hidden }}
-{% endfor %}
-<input type="hidden" name="{{ stage_field }}" value="2" />
-<input type="hidden" name="{{ hash_field }}" value="{{ hash_value }}" />
-<p><input type="submit" value="Submit" /></p>
-</form>
-
-<h1>Or edit it again</h1>
-
-<form action="" method="post">
-<table>
-{{ form }}
-</table>
-<input type="hidden" name="{{ stage_field }}" value="1" />
-<p><input type="submit" value="Preview" /></p>
-</form>
-
-{% endblock %}
--- a/thirdparty/google_appengine/lib/django/django/contrib/redirects/README.TXT	Mon Sep 07 20:26:39 2009 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,8 +0,0 @@
-This is an optional add-on app, redirects.
-
-For full documentation, see either of these:
-
-    * The file django/docs/redirects.txt in the Django distribution
-    * http://www.djangoproject.com/documentation/redirects/ on the Web
-
-Both have identical content.
\ No newline at end of file
--- a/thirdparty/google_appengine/lib/django/django/core/management.py	Mon Sep 07 20:26:39 2009 +0200
+++ b/thirdparty/google_appengine/lib/django/django/core/management.py	Mon Sep 07 20:27:37 2009 +0200
@@ -1192,9 +1192,7 @@
         print "Development server is running at http://%s:%s/" % (addr, port)
         print "Quit the server with %s." % quit_command
         try:
-            import django
-            path = admin_media_dir or django.__path__[0] + '/contrib/admin/media'
-            handler = AdminMediaHandler(WSGIHandler(), path)
+            handler = AdminMediaHandler(WSGIHandler(), admin_media_path)
             run(addr, int(port), handler)
         except WSGIServerException, e:
             # Use helpful error messages instead of ugly tracebacks.
--- a/thirdparty/google_appengine/lib/django/django/core/servers/basehttp.py	Mon Sep 07 20:26:39 2009 +0200
+++ b/thirdparty/google_appengine/lib/django/django/core/servers/basehttp.py	Mon Sep 07 20:27:37 2009 +0200
@@ -11,6 +11,8 @@
 from types import ListType, StringType
 import os, re, sys, time, urllib
 
+from django.utils._os import safe_join
+
 __version__ = "0.1"
 __all__ = ['WSGIServer','WSGIRequestHandler','demo_app']
 
@@ -599,11 +601,25 @@
         self.application = application
         if not media_dir:
             import django
-            self.media_dir = django.__path__[0] + '/contrib/admin/media'
+            self.media_dir = \
+                os.path.join(django.__path__[0], 'contrib', 'admin', 'media')
         else:
             self.media_dir = media_dir
         self.media_url = settings.ADMIN_MEDIA_PREFIX
 
+    def file_path(self, url):
+        """
+        Returns the path to the media file on disk for the given URL.
+
+        The passed URL is assumed to begin with ADMIN_MEDIA_PREFIX.  If the
+        resultant file path is outside the media directory, then a ValueError
+        is raised.
+        """
+        # Remove ADMIN_MEDIA_PREFIX.
+        relative_url = url[len(self.media_url):]
+        relative_path = urllib.url2pathname(relative_url)
+        return safe_join(self.media_dir, relative_path)
+
     def __call__(self, environ, start_response):
         import os.path
 
@@ -614,19 +630,25 @@
             return self.application(environ, start_response)
 
         # Find the admin file and serve it up, if it exists and is readable.
-        relative_url = environ['PATH_INFO'][len(self.media_url):]
-        file_path = os.path.join(self.media_dir, relative_url)
+        try:
+            file_path = self.file_path(environ['PATH_INFO'])
+        except ValueError: # Resulting file path was not valid.
+            status = '404 NOT FOUND'
+            headers = {'Content-type': 'text/plain'}
+            output = ['Page not found: %s' % environ['PATH_INFO']]
+            start_response(status, headers.items())
+            return output
         if not os.path.exists(file_path):
             status = '404 NOT FOUND'
             headers = {'Content-type': 'text/plain'}
-            output = ['Page not found: %s' % file_path]
+            output = ['Page not found: %s' % environ['PATH_INFO']]
         else:
             try:
                 fp = open(file_path, 'rb')
             except IOError:
                 status = '401 UNAUTHORIZED'
                 headers = {'Content-type': 'text/plain'}
-                output = ['Permission denied: %s' % file_path]
+                output = ['Permission denied: %s' % environ['PATH_INFO']]
             else:
                 status = '200 OK'
                 headers = {}
--- a/thirdparty/google_appengine/lib/django/django/dispatch/license.txt	Mon Sep 07 20:26:39 2009 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,34 +0,0 @@
-PyDispatcher License
-
-    Copyright (c) 2001-2003, Patrick K. O'Brien and Contributors
-    All rights reserved.
-    
-    Redistribution and use in source and binary forms, with or without
-    modification, are permitted provided that the following conditions
-    are met:
-    
-        Redistributions of source code must retain the above copyright
-        notice, this list of conditions and the following disclaimer.
-    
-        Redistributions in binary form must reproduce the above
-        copyright notice, this list of conditions and the following
-        disclaimer in the documentation and/or other materials
-        provided with the distribution.
-    
-        The name of Patrick K. O'Brien, or the name of any Contributor,
-        may not be used to endorse or promote products derived from this 
-        software without specific prior written permission.
-    
-    THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-    ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-    LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
-    FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
-    COPYRIGHT HOLDERS AND CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
-    INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
-    (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
-    SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
-    HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
-    STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
-    ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
-    OF THE POSSIBILITY OF SUCH DAMAGE. 
-
--- a/thirdparty/google_appengine/lib/django/django/utils/simplejson/LICENSE.txt	Mon Sep 07 20:26:39 2009 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,20 +0,0 @@
-simplejson 1.5
-Copyright (c) 2006 Bob Ippolito
-
-Permission is hereby granted, free of charge, to any person obtaining a copy of
-this software and associated documentation files (the "Software"), to deal in
-the Software without restriction, including without limitation the rights to
-use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
-of the Software, and to permit persons to whom the Software is furnished to do
-so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in all
-copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
-SOFTWARE.
--- a/thirdparty/google_appengine/lib/django/examples/hello/urls.py	Mon Sep 07 20:26:39 2009 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,10 +0,0 @@
-from django.conf.urls.defaults import *
-
-urlpatterns = patterns('examples.hello.views',
-    (r'^html/$', 'hello_html'),
-    (r'^text/$', 'hello_text'),
-    (r'^write/$', 'hello_write'),
-    (r'^metadata/$', 'metadata'),
-    (r'^getdata/$', 'get_data'),
-    (r'^postdata/$', 'post_data'),
-)
--- a/thirdparty/google_appengine/lib/django/examples/hello/views.py	Mon Sep 07 20:26:39 2009 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,55 +0,0 @@
-from django.http import HttpResponse
-from django.utils.html import escape
-
-def hello_html(request):
-    "This view is a basic 'hello world' example in HTML."
-    return HttpResponse('<h1>Hello, world.</h1>')
-
-def hello_text(request):
-    "This view is a basic 'hello world' example in plain text."
-    return HttpResponse('Hello, world.', mimetype='text/plain')
-
-def hello_write(request):
-    "This view demonstrates how an HttpResponse object has a write() method."
-    r = HttpResponse()
-    r.write("<p>Here's a paragraph.</p>")
-    r.write("<p>Here's another paragraph.</p>")
-    return r
-
-def metadata(request):
-    "This view demonstrates how to retrieve request metadata, such as HTTP headers."
-    r = HttpResponse('<h1>All about you</h1>')
-    r.write("<p>Here's all known metadata about your request, according to <code>request.META</code>:</p>")
-    r.write('<table>')
-    meta_items = request.META.items()
-    meta_items.sort()
-    for k, v in meta_items:
-        r.write('<tr><th>%s</th><td>%r</td></tr>' % (k, v))
-    r.write('</table>')
-    return r
-
-def get_data(request):
-    "This view demonstrates how to retrieve GET data."
-    r = HttpResponse()
-    if request.GET:
-        r.write('<p>GET data found! Here it is:</p>')
-        r.write('<ul>%s</ul>' % ''.join(['<li><strong>%s:</strong> %r</li>' % (escape(k), escape(v)) for k, v in request.GET.items()]))
-    r.write('<form action="" method="get">')
-    r.write('<p>First name: <input type="text" name="first_name"></p>')
-    r.write('<p>Last name: <input type="text" name="last_name"></p>')
-    r.write('<p><input type="submit" value="Submit"></p>')
-    r.write('</form>')
-    return r
-
-def post_data(request):
-    "This view demonstrates how to retrieve POST data."
-    r = HttpResponse()
-    if request.POST:
-        r.write('<p>POST data found! Here it is:</p>')
-        r.write('<ul>%s</ul>' % ''.join(['<li><strong>%s:</strong> %r</li>' % (escape(k), escape(v)) for k, v in request.POST.items()]))
-    r.write('<form action="" method="post">')
-    r.write('<p>First name: <input type="text" name="first_name"></p>')
-    r.write('<p>Last name: <input type="text" name="last_name"></p>')
-    r.write('<p><input type="submit" value="Submit"></p>')
-    r.write('</form>')
-    return r
--- a/thirdparty/google_appengine/lib/django/examples/manage.py	Mon Sep 07 20:26:39 2009 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,11 +0,0 @@
-#!/usr/bin/env python
-from django.core.management import execute_manager
-try:
-    import settings # Assumed to be in the same directory.
-except ImportError:
-    import sys
-    sys.stderr.write("Error: Can't find the file 'settings.py' in the directory containing %r. It appears you've customized things.\nYou'll have to run django-admin.py, passing it your settings module.\n" % __file__)
-    sys.exit(1)
-
-if __name__ == "__main__":
-    execute_manager(settings)
--- a/thirdparty/google_appengine/lib/django/examples/settings.py	Mon Sep 07 20:26:39 2009 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,5 +0,0 @@
-# Django settings for the example project.
-
-DEBUG = True
-TEMPLATE_DEBUG = DEBUG
-ROOT_URLCONF = 'examples.urls'
--- a/thirdparty/google_appengine/lib/django/examples/urls.py	Mon Sep 07 20:26:39 2009 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,6 +0,0 @@
-from django.conf.urls.defaults import *
-
-urlpatterns = patterns('',
-    (r'^$', 'examples.views.index'),
-    (r'^hello/', include('examples.hello.urls')),
-)
--- a/thirdparty/google_appengine/lib/django/examples/views.py	Mon Sep 07 20:26:39 2009 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,12 +0,0 @@
-from django import http
-
-def index(request):
-    r = http.HttpResponse('<h1>Django examples</h1><ul>')
-    r.write('<li><a href="hello/html/">Hello world (HTML)</a></li>')
-    r.write('<li><a href="hello/text/">Hello world (text)</a></li>')
-    r.write('<li><a href="hello/write/">HttpResponse objects are file-like objects</a></li>')
-    r.write('<li><a href="hello/metadata/">Displaying request metadata</a></li>')
-    r.write('<li><a href="hello/getdata/">Displaying GET data</a></li>')
-    r.write('<li><a href="hello/postdata/">Displaying POST data</a></li>')
-    r.write('</ul>')
-    return r
--- a/thirdparty/google_appengine/lib/django/extras/README.TXT	Mon Sep 07 20:26:39 2009 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,1 +0,0 @@
-This directory contains extra stuff that can improve your Django experience.
--- a/thirdparty/google_appengine/lib/django/extras/django_bash_completion	Mon Sep 07 20:26:39 2009 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,159 +0,0 @@
-# #########################################################################
-# This bash script adds tab-completion feature to django-admin.py and
-# manage.py.
-#
-# Testing it out without installing
-# =================================
-#
-# To test out the completion without "installing" this, just run this file
-# directly, like so:
-#
-#     . ~/path/to/django_bash_completion
-#
-# Note: There's a dot ('.') at the beginning of that command.
-#
-# After you do that, tab completion will immediately be made available in your
-# current Bash shell. But it won't be available next time you log in.
-#
-# Installing
-# ==========
-#
-# To install this, point to this file from your .bash_profile, like so:
-#
-#     . ~/path/to/django_bash_completion
-#
-# Do the same in your .bashrc if .bashrc doesn't invoke .bash_profile.
-#
-# Settings will take effect the next time you log in.
-#
-# Uninstalling
-# ============
-#
-# To uninstall, just remove the line from your .bash_profile and .bashrc.
-
-# Enable extended pattern matching operators.
-shopt -s extglob
-
-_django_completion()
-{
-    local cur prev opts actions action_shell_opts action_runfcgi_opts
-    COMPREPLY=()
-    cur="${COMP_WORDS[COMP_CWORD]}"
-    prev="${COMP_WORDS[COMP_CWORD-1]}"
-
-    # Standalone options
-    opts="--help --settings --pythonpath --version"
-    # Actions
-    actions="adminindex createcachetable dbshell diffsettings \
-             inspectdb install reset runfcgi runserver \
-             shell sql sqlall sqlclear sqlindexes sqlinitialdata \
-             sqlreset sqlsequencereset startapp startproject \
-             syncdb validate"
-    # Action's options
-    action_shell_opts="--plain"
-    action_runfcgi_opts="host port socket method maxspare minspare maxchildren daemonize pidfile workdir"
-
-    if [[ # django-admin.py, ./manage, manage.py
-          ( ${COMP_CWORD} -eq 1 &&
-            ( ${COMP_WORDS[0]} == django-admin.py ||
-              ${COMP_WORDS[0]} == ./manage.py ||
-              ${COMP_WORDS[0]} == manage.py ) )
-          ||
-          # python manage.py, /some/path/python manage.py (if manage.py exists)
-          ( ${COMP_CWORD} -eq 2 &&
-            ( $( basename ${COMP_WORDS[0]} ) == python?([1-9]\.[0-9]) ) &&
-            ( $( basename ${COMP_WORDS[1]} ) == manage.py) &&
-            ( -r ${COMP_WORDS[1]} ) ) 
-          ||
-          ( ${COMP_CWORD} -eq 2 &&
-            ( $( basename ${COMP_WORDS[0]} ) == python?([1-9]\.[0-9]) ) &&
-            ( $( basename ${COMP_WORDS[1]} ) == django-admin.py) &&
-            ( -r ${COMP_WORDS[1]} ) ) ]] ; then
-
-        case ${cur} in
-            -*)
-                COMPREPLY=( $(compgen -W "${opts}" -- ${cur}) )
-                action=$COMPREPLY
-                return 0
-                ;;
-            *)
-                COMPREPLY=( $(compgen -W "${actions}" -- ${cur}) )
-                action=$COMPREPLY
-                return 0
-                ;;
-        esac
-    else
-        case ${prev} in
-            adminindex|install|reset| \
-            sql|sqlall|sqlclear|sqlindexes| \
-            sqlinitialdata|sqlreset|sqlsequencereset)
-            # App completion
-            settings=""
-            # If settings.py in the PWD, use that
-            if [ -e settings.py ] ; then
-                settings="$PWD/settings.py"
-            else
-                # Use the ENV variable if it is set
-                if [ $DJANGO_SETTINGS_MODULE ] ; then
-                    settings=$DJANGO_SETTINGS_MODULE
-                fi
-            fi
-            # Couldn't find settings so return nothing
-            if [ -z $settings ] ; then
-                COMPREPLY=()
-            # Otherwise inspect settings.py file
-            else
-                apps=`sed -n "/INSTALLED_APPS = (/,/)/p" $settings | \
-                      grep -v "django.contrib" | 
-                      sed -n "s/^[ ]*'\(.*\.\)*\(.*\)'.*$/\2 /pg" | \
-                      tr -d "\n"`
-                COMPREPLY=( $(compgen -W "${apps}" -- ${cur}) )
-            fi
-            return 0
-            ;;
-
-            createcachetable|dbshell|diffsettings| \
-            inspectdb|runserver|startapp|startproject|syncdb| \
-            validate)
-                COMPREPLY=()
-                return 0
-                ;;
-            shell)
-                COMPREPLY=( $(compgen -W "$action_shell_opts" -- ${cur}) )
-                return 0
-                ;;
-            runfcgi)
-                COMPREPLY=( $(compgen -W "$action_runfcgi_opts" -- ${cur}) )
-                return 0
-                ;;
-            host*|port*|socket*|method*|maxspare*|minspare*|maxchildren*|daemonize*|pidfile*|workdir*)
-                if [ "$action"  == "runfcgi" ] ; then
-                    COMPREPLY=( $(compgen -W "$action_runfcgi_opts" -- ${cur}) )
-                    return 0
-                fi
-                return 0
-                ;;
-            *)
-                #COMPREPLY=( $(compgen -W "auth core" -- ${cur}) )
-                COMPREPLY=()
-                return 0
-                ;;
-        esac
-    fi
-}
-
-complete -F _django_completion django-admin.py manage.py
-
-# Support for multiple interpreters.
-unset pythons
-if command -v whereis &>/dev/null; then
-    python_interpreters=$(whereis python | cut -d " " -f 2-) 
-    for python in $python_interpreters; do
-        pythons="${pythons} $(basename $python)"
-    done
-    pythons=$(echo $pythons | tr " " "\n" | sort -u | tr "\n" " ")
-else
-    pythons=python    
-fi
-
-complete -F _django_completion -o default $pythons
--- a/thirdparty/google_appengine/lib/django/setup.py	Mon Sep 07 20:26:39 2009 +0200
+++ b/thirdparty/google_appengine/lib/django/setup.py	Mon Sep 07 20:27:37 2009 +0200
@@ -34,10 +34,11 @@
 
 setup(
     name = "Django",
-    version = "0.96.1",
+    version = "0.96.4",
     url = 'http://www.djangoproject.com/',
-    author = 'Lawrence Journal-World',
-    author_email = 'holovaty@gmail.com',
+    author = 'Django Software Foundation',
+    author_email = 'foundation@djangoproject.com',
+    download_url = 'http://media.djangoproject.com/releases/0.96/Django-0.96.4.tar.gz',
     description = 'A high-level Python Web framework that encourages rapid development and clean, pragmatic design.',
     packages = packages,
     data_files = data_files,
--- a/thirdparty/google_appengine/lib/django/tests/modeltests/basic/models.py	Mon Sep 07 20:26:39 2009 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,367 +0,0 @@
-"""
-1. Bare-bones model
-
-This is a basic model with only two non-primary-key fields.
-"""
-
-from django.db import models
-
-class Article(models.Model):
-    headline = models.CharField(maxlength=100, default='Default headline')
-    pub_date = models.DateTimeField()
-
-    class Meta:
-        ordering = ('pub_date','headline')
-
-    def __str__(self):
-        return self.headline
-
-__test__ = {'API_TESTS': """
-# No articles are in the system yet.
->>> Article.objects.all()
-[]
-
-# Create an Article.
->>> from datetime import datetime
->>> a = Article(id=None, headline='Area man programs in Python', pub_date=datetime(2005, 7, 28))
-
-# Save it into the database. You have to call save() explicitly.
->>> a.save()
-
-# Now it has an ID. Note it's a long integer, as designated by the trailing "L".
->>> a.id
-1L
-
-# Access database columns via Python attributes.
->>> a.headline
-'Area man programs in Python'
->>> a.pub_date
-datetime.datetime(2005, 7, 28, 0, 0)
-
-# Change values by changing the attributes, then calling save().
->>> a.headline = 'Area woman programs in Python'
->>> a.save()
-
-# Article.objects.all() returns all the articles in the database.
->>> Article.objects.all()
-[<Article: Area woman programs in Python>]
-
-# Django provides a rich database lookup API.
->>> Article.objects.get(id__exact=1)
-<Article: Area woman programs in Python>
->>> Article.objects.get(headline__startswith='Area woman')
-<Article: Area woman programs in Python>
->>> Article.objects.get(pub_date__year=2005)
-<Article: Area woman programs in Python>
->>> Article.objects.get(pub_date__year=2005, pub_date__month=7)
-<Article: Area woman programs in Python>
->>> Article.objects.get(pub_date__year=2005, pub_date__month=7, pub_date__day=28)
-<Article: Area woman programs in Python>
-
-# The "__exact" lookup type can be omitted, as a shortcut.
->>> Article.objects.get(id=1)
-<Article: Area woman programs in Python>
->>> Article.objects.get(headline='Area woman programs in Python')
-<Article: Area woman programs in Python>
-
->>> Article.objects.filter(pub_date__year=2005)
-[<Article: Area woman programs in Python>]
->>> Article.objects.filter(pub_date__year=2004)
-[]
->>> Article.objects.filter(pub_date__year=2005, pub_date__month=7)
-[<Article: Area woman programs in Python>]
-
-# Django raises an Article.DoesNotExist exception for get() if the parameters
-# don't match any object.
->>> Article.objects.get(id__exact=2)
-Traceback (most recent call last):
-    ...
-DoesNotExist: Article matching query does not exist.
-
->>> Article.objects.get(pub_date__year=2005, pub_date__month=8)
-Traceback (most recent call last):
-    ...
-DoesNotExist: Article matching query does not exist.
-
-# Lookup by a primary key is the most common case, so Django provides a
-# shortcut for primary-key exact lookups.
-# The following is identical to articles.get(id=1).
->>> Article.objects.get(pk=1)
-<Article: Area woman programs in Python>
-
-# pk can be used as a shortcut for the primary key name in any query
->>> Article.objects.filter(pk__in=[1])
-[<Article: Area woman programs in Python>]
-
-# Model instances of the same type and same ID are considered equal.
->>> a = Article.objects.get(pk=1)
->>> b = Article.objects.get(pk=1)
->>> a == b
-True
-
-# You can initialize a model instance using positional arguments, which should
-# match the field order as defined in the model.
->>> a2 = Article(None, 'Second article', datetime(2005, 7, 29))
->>> a2.save()
->>> a2.id
-2L
->>> a2.headline
-'Second article'
->>> a2.pub_date
-datetime.datetime(2005, 7, 29, 0, 0)
-
-# ...or, you can use keyword arguments.
->>> a3 = Article(id=None, headline='Third article', pub_date=datetime(2005, 7, 30))
->>> a3.save()
->>> a3.id
-3L
->>> a3.headline
-'Third article'
->>> a3.pub_date
-datetime.datetime(2005, 7, 30, 0, 0)
-
-# You can also mix and match position and keyword arguments, but be sure not to
-# duplicate field information.
->>> a4 = Article(None, 'Fourth article', pub_date=datetime(2005, 7, 31))
->>> a4.save()
->>> a4.headline
-'Fourth article'
-
-# Don't use invalid keyword arguments.
->>> a5 = Article(id=None, headline='Invalid', pub_date=datetime(2005, 7, 31), foo='bar')
-Traceback (most recent call last):
-    ...
-TypeError: 'foo' is an invalid keyword argument for this function
-
-# You can leave off the value for an AutoField when creating an object, because
-# it'll get filled in automatically when you save().
->>> a5 = Article(headline='Article 6', pub_date=datetime(2005, 7, 31))
->>> a5.save()
->>> a5.id
-5L
->>> a5.headline
-'Article 6'
-
-# If you leave off a field with "default" set, Django will use the default.
->>> a6 = Article(pub_date=datetime(2005, 7, 31))
->>> a6.save()
->>> a6.headline
-'Default headline'
-
-# For DateTimeFields, Django saves as much precision (in seconds) as you
-# give it.
->>> a7 = Article(headline='Article 7', pub_date=datetime(2005, 7, 31, 12, 30))
->>> a7.save()
->>> Article.objects.get(id__exact=7).pub_date
-datetime.datetime(2005, 7, 31, 12, 30)
-
->>> a8 = Article(headline='Article 8', pub_date=datetime(2005, 7, 31, 12, 30, 45))
->>> a8.save()
->>> Article.objects.get(id__exact=8).pub_date
-datetime.datetime(2005, 7, 31, 12, 30, 45)
->>> a8.id
-8L
-
-# Saving an object again doesn't create a new object -- it just saves the old one.
->>> a8.save()
->>> a8.id
-8L
->>> a8.headline = 'Updated article 8'
->>> a8.save()
->>> a8.id
-8L
-
->>> a7 == a8
-False
->>> a8 == Article.objects.get(id__exact=8)
-True
->>> a7 != a8
-True
->>> Article.objects.get(id__exact=8) != Article.objects.get(id__exact=7)
-True
->>> Article.objects.get(id__exact=8) == Article.objects.get(id__exact=7)
-False
-
-# dates() returns a list of available dates of the given scope for the given field.
->>> Article.objects.dates('pub_date', 'year')
-[datetime.datetime(2005, 1, 1, 0, 0)]
->>> Article.objects.dates('pub_date', 'month')
-[datetime.datetime(2005, 7, 1, 0, 0)]
->>> Article.objects.dates('pub_date', 'day')
-[datetime.datetime(2005, 7, 28, 0, 0), datetime.datetime(2005, 7, 29, 0, 0), datetime.datetime(2005, 7, 30, 0, 0), datetime.datetime(2005, 7, 31, 0, 0)]
->>> Article.objects.dates('pub_date', 'day', order='ASC')
-[datetime.datetime(2005, 7, 28, 0, 0), datetime.datetime(2005, 7, 29, 0, 0), datetime.datetime(2005, 7, 30, 0, 0), datetime.datetime(2005, 7, 31, 0, 0)]
->>> Article.objects.dates('pub_date', 'day', order='DESC')
-[datetime.datetime(2005, 7, 31, 0, 0), datetime.datetime(2005, 7, 30, 0, 0), datetime.datetime(2005, 7, 29, 0, 0), datetime.datetime(2005, 7, 28, 0, 0)]
-
-# dates() requires valid arguments.
-
->>> Article.objects.dates()
-Traceback (most recent call last):
-   ...
-TypeError: dates() takes at least 3 arguments (1 given)
-
->>> Article.objects.dates('invalid_field', 'year')
-Traceback (most recent call last):
-   ...
-FieldDoesNotExist: Article has no field named 'invalid_field'
-
->>> Article.objects.dates('pub_date', 'bad_kind')
-Traceback (most recent call last):
-   ...
-AssertionError: 'kind' must be one of 'year', 'month' or 'day'.
-
->>> Article.objects.dates('pub_date', 'year', order='bad order')
-Traceback (most recent call last):
-   ...
-AssertionError: 'order' must be either 'ASC' or 'DESC'.
-
-# Use iterator() with dates() to return a generator that lazily requests each
-# result one at a time, to save memory.
->>> for a in Article.objects.dates('pub_date', 'day', order='DESC').iterator():
-...     print repr(a)
-datetime.datetime(2005, 7, 31, 0, 0)
-datetime.datetime(2005, 7, 30, 0, 0)
-datetime.datetime(2005, 7, 29, 0, 0)
-datetime.datetime(2005, 7, 28, 0, 0)
-
-# You can combine queries with & and |.
->>> s1 = Article.objects.filter(id__exact=1)
->>> s2 = Article.objects.filter(id__exact=2)
->>> s1 | s2
-[<Article: Area woman programs in Python>, <Article: Second article>]
->>> s1 & s2
-[]
-
-# You can get the number of objects like this:
->>> len(Article.objects.filter(id__exact=1))
-1
-
-# You can get items using index and slice notation.
->>> Article.objects.all()[0]
-<Article: Area woman programs in Python>
->>> Article.objects.all()[1:3]
-[<Article: Second article>, <Article: Third article>]
->>> s3 = Article.objects.filter(id__exact=3)
->>> (s1 | s2 | s3)[::2]
-[<Article: Area woman programs in Python>, <Article: Third article>]
-
-# Slices (without step) are lazy:
->>> Article.objects.all()[0:5].filter()
-[<Article: Area woman programs in Python>, <Article: Second article>, <Article: Third article>, <Article: Article 6>, <Article: Default headline>]
-
-# Slicing again works:
->>> Article.objects.all()[0:5][0:2]
-[<Article: Area woman programs in Python>, <Article: Second article>]
->>> Article.objects.all()[0:5][:2]
-[<Article: Area woman programs in Python>, <Article: Second article>]
->>> Article.objects.all()[0:5][4:]
-[<Article: Default headline>]
->>> Article.objects.all()[0:5][5:]
-[]
-
-# Some more tests!
->>> Article.objects.all()[2:][0:2]
-[<Article: Third article>, <Article: Article 6>]
->>> Article.objects.all()[2:][:2]
-[<Article: Third article>, <Article: Article 6>]
->>> Article.objects.all()[2:][2:3]
-[<Article: Default headline>]
-
-# Note that you can't use 'offset' without 'limit' (on some dbs), so this doesn't work:
->>> Article.objects.all()[2:]
-Traceback (most recent call last):
-    ...
-AssertionError: 'offset' is not allowed without 'limit'
-
-# Also, once you have sliced you can't filter, re-order or combine
->>> Article.objects.all()[0:5].filter(id=1)
-Traceback (most recent call last):
-    ...
-AssertionError: Cannot filter a query once a slice has been taken.
-
->>> Article.objects.all()[0:5].order_by('id')
-Traceback (most recent call last):
-    ...
-AssertionError: Cannot reorder a query once a slice has been taken.
-
->>> Article.objects.all()[0:1] & Article.objects.all()[4:5]
-Traceback (most recent call last):
-    ...
-AssertionError: Cannot combine queries once a slice has been taken.
-
-# Negative slices are not supported, due to database constraints.
-# (hint: inverting your ordering might do what you need).
->>> Article.objects.all()[-1]
-Traceback (most recent call last):
-    ...
-AssertionError: Negative indexing is not supported.
->>> Article.objects.all()[0:-5]
-Traceback (most recent call last):
-    ...
-AssertionError: Negative indexing is not supported.
-
-# An Article instance doesn't have access to the "objects" attribute.
-# That's only available on the class.
->>> a7.objects.all()
-Traceback (most recent call last):
-    ...
-AttributeError: Manager isn't accessible via Article instances
-
->>> a7.objects
-Traceback (most recent call last):
-    ...
-AttributeError: Manager isn't accessible via Article instances
-
-# Bulk delete test: How many objects before and after the delete?
->>> Article.objects.all()
-[<Article: Area woman programs in Python>, <Article: Second article>, <Article: Third article>, <Article: Article 6>, <Article: Default headline>, <Article: Fourth article>, <Article: Article 7>, <Article: Updated article 8>]
->>> Article.objects.filter(id__lte=4).delete()
->>> Article.objects.all()
-[<Article: Article 6>, <Article: Default headline>, <Article: Article 7>, <Article: Updated article 8>]
-"""}
-
-from django.conf import settings
-
-building_docs = getattr(settings, 'BUILDING_DOCS', False)
-
-if building_docs or settings.DATABASE_ENGINE == 'postgresql':
-    __test__['API_TESTS'] += """
-# In PostgreSQL, microsecond-level precision is available.
->>> a9 = Article(headline='Article 9', pub_date=datetime(2005, 7, 31, 12, 30, 45, 180))
->>> a9.save()
->>> Article.objects.get(id__exact=9).pub_date
-datetime.datetime(2005, 7, 31, 12, 30, 45, 180)
-"""
-
-if building_docs or settings.DATABASE_ENGINE == 'mysql':
-    __test__['API_TESTS'] += """
-# In MySQL, microsecond-level precision isn't available. You'll lose
-# microsecond-level precision once the data is saved.
->>> a9 = Article(headline='Article 9', pub_date=datetime(2005, 7, 31, 12, 30, 45, 180))
->>> a9.save()
->>> Article.objects.get(id__exact=9).pub_date
-datetime.datetime(2005, 7, 31, 12, 30, 45)
-"""
-
-__test__['API_TESTS'] += """
-
-# You can manually specify the primary key when creating a new object.
->>> a101 = Article(id=101, headline='Article 101', pub_date=datetime(2005, 7, 31, 12, 30, 45))
->>> a101.save()
->>> a101 = Article.objects.get(pk=101)
->>> a101.headline
-'Article 101'
-
-# You can create saved objects in a single step
->>> a10 = Article.objects.create(headline="Article 10", pub_date=datetime(2005, 7, 31, 12, 30, 45))
->>> Article.objects.get(headline="Article 10")
-<Article: Article 10>
-
-# Edge-case test: A year lookup should retrieve all objects in the given
-year, including Jan. 1 and Dec. 31.
->>> a11 = Article.objects.create(headline='Article 11', pub_date=datetime(2008, 1, 1))
->>> a12 = Article.objects.create(headline='Article 12', pub_date=datetime(2008, 12, 31, 23, 59, 59, 999999))
->>> Article.objects.filter(pub_date__year=2008)
-[<Article: Article 11>, <Article: Article 12>]
-"""
--- a/thirdparty/google_appengine/lib/django/tests/modeltests/choices/models.py	Mon Sep 07 20:26:39 2009 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,39 +0,0 @@
-"""
-21. Specifying 'choices' for a field
-
-Most fields take a ``choices`` parameter, which should be a tuple of tuples
-specifying which are the valid values for that field.
-
-For each field that has ``choices``, a model instance gets a
-``get_fieldname_display()`` method, where ``fieldname`` is the name of the
-field. This method returns the "human-readable" value of the field.
-"""
-
-from django.db import models
-
-GENDER_CHOICES = (
-    ('M', 'Male'),
-    ('F', 'Female'),
-)
-
-class Person(models.Model):
-    name = models.CharField(maxlength=20)
-    gender = models.CharField(maxlength=1, choices=GENDER_CHOICES)
-
-    def __str__(self):
-        return self.name
-
-__test__ = {'API_TESTS':"""
->>> a = Person(name='Adrian', gender='M')
->>> a.save()
->>> s = Person(name='Sara', gender='F')
->>> s.save()
->>> a.gender
-'M'
->>> s.gender
-'F'
->>> a.get_gender_display()
-'Male'
->>> s.get_gender_display()
-'Female'
-"""}
--- a/thirdparty/google_appengine/lib/django/tests/modeltests/custom_columns/models.py	Mon Sep 07 20:26:39 2009 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,105 +0,0 @@
-"""
-17. Custom column/table names
-
-If your database column name is different than your model attribute, use the
-``db_column`` parameter. Note that you'll use the field's name, not its column
-name, in API usage.
-
-If your database table name is different than your model name, use the
-``db_table`` Meta attribute. This has no effect on the API used to 
-query the database.
-
-If you need to use a table name for a many-to-many relationship that differs 
-from the default generated name, use the ``db_table`` parameter on the 
-ManyToMany field. This has no effect on the API for querying the database.
-
-"""
-
-from django.db import models
-
-class Author(models.Model):
-    first_name = models.CharField(maxlength=30, db_column='firstname')
-    last_name = models.CharField(maxlength=30, db_column='last')
-
-    def __str__(self):
-        return '%s %s' % (self.first_name, self.last_name)
-
-    class Meta:
-        db_table = 'my_author_table'
-        ordering = ('last_name','first_name')
-
-class Article(models.Model):
-    headline = models.CharField(maxlength=100)
-    authors = models.ManyToManyField(Author, db_table='my_m2m_table')
-
-    def __str__(self):
-        return self.headline
-
-    class Meta:
-        ordering = ('headline',)
-        
-__test__ = {'API_TESTS':"""
-# Create a Author.
->>> a = Author(first_name='John', last_name='Smith')
->>> a.save()
-
->>> a.id
-1
-
-# Create another author
->>> a2 = Author(first_name='Peter', last_name='Jones')
->>> a2.save()
-
-# Create an article
->>> art = Article(headline='Django lets you build web apps easily')
->>> art.save()
->>> art.authors = [a, a2]
-
-# Although the table and column names on Author have been set to 
-# custom values, nothing about using the Author model has changed...
-
-# Query the available authors
->>> Author.objects.all()
-[<Author: Peter Jones>, <Author: John Smith>]
-
->>> Author.objects.filter(first_name__exact='John')
-[<Author: John Smith>]
-
->>> Author.objects.get(first_name__exact='John')
-<Author: John Smith>
-
->>> Author.objects.filter(firstname__exact='John')
-Traceback (most recent call last):
-    ...
-TypeError: Cannot resolve keyword 'firstname' into field
-
->>> a = Author.objects.get(last_name__exact='Smith')
->>> a.first_name
-'John'
->>> a.last_name
-'Smith'
->>> a.firstname
-Traceback (most recent call last):
-    ...
-AttributeError: 'Author' object has no attribute 'firstname'
->>> a.last
-Traceback (most recent call last):
-    ...
-AttributeError: 'Author' object has no attribute 'last'
-
-# Although the Article table uses a custom m2m table, 
-# nothing about using the m2m relationship has changed...
-
-# Get all the authors for an article
->>> art.authors.all()
-[<Author: Peter Jones>, <Author: John Smith>]
-
-# Get the articles for an author
->>> a.article_set.all()
-[<Article: Django lets you build web apps easily>]
-
-# Query the authors across the m2m relation
->>> art.authors.filter(last_name='Jones')
-[<Author: Peter Jones>]
-
-"""}
--- a/thirdparty/google_appengine/lib/django/tests/modeltests/custom_managers/models.py	Mon Sep 07 20:26:39 2009 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,107 +0,0 @@
-"""
-23. Giving models a custom manager
-
-You can use a custom ``Manager`` in a particular model by extending the base
-``Manager`` class and instantiating your custom ``Manager`` in your model.
-
-There are two reasons you might want to customize a ``Manager``: to add extra
-``Manager`` methods, and/or to modify the initial ``QuerySet`` the ``Manager``
-returns.
-"""
-
-from django.db import models
-
-# An example of a custom manager called "objects".
-
-class PersonManager(models.Manager):
-    def get_fun_people(self):
-        return self.filter(fun=True)
-
-class Person(models.Model):
-    first_name = models.CharField(maxlength=30)
-    last_name = models.CharField(maxlength=30)
-    fun = models.BooleanField()
-    objects = PersonManager()
-
-    def __str__(self):
-        return "%s %s" % (self.first_name, self.last_name)
-
-# An example of a custom manager that sets get_query_set().
-
-class PublishedBookManager(models.Manager):
-    def get_query_set(self):
-        return super(PublishedBookManager, self).get_query_set().filter(is_published=True)
-
-class Book(models.Model):
-    title = models.CharField(maxlength=50)
-    author = models.CharField(maxlength=30)
-    is_published = models.BooleanField()
-    published_objects = PublishedBookManager()
-    authors = models.ManyToManyField(Person, related_name='books')
-
-    def __str__(self):
-        return self.title
-
-# An example of providing multiple custom managers.
-
-class FastCarManager(models.Manager):
-    def get_query_set(self):
-        return super(FastCarManager, self).get_query_set().filter(top_speed__gt=150)
-
-class Car(models.Model):
-    name = models.CharField(maxlength=10)
-    mileage = models.IntegerField()
-    top_speed = models.IntegerField(help_text="In miles per hour.")
-    cars = models.Manager()
-    fast_cars = FastCarManager()
-
-    def __str__(self):
-        return self.name
-
-__test__ = {'API_TESTS':"""
->>> p1 = Person(first_name='Bugs', last_name='Bunny', fun=True)
->>> p1.save()
->>> p2 = Person(first_name='Droopy', last_name='Dog', fun=False)
->>> p2.save()
->>> Person.objects.get_fun_people()
-[<Person: Bugs Bunny>]
-
-# The RelatedManager used on the 'books' descriptor extends the default manager
->>> from modeltests.custom_managers.models import PublishedBookManager
->>> isinstance(p2.books, PublishedBookManager)
-True
-
->>> b1 = Book(title='How to program', author='Rodney Dangerfield', is_published=True)
->>> b1.save()
->>> b2 = Book(title='How to be smart', author='Albert Einstein', is_published=False)
->>> b2.save()
-
-# The default manager, "objects", doesn't exist,
-# because a custom one was provided.
->>> Book.objects
-Traceback (most recent call last):
-    ...
-AttributeError: type object 'Book' has no attribute 'objects'
-
-# The RelatedManager used on the 'authors' descriptor extends the default manager
->>> from modeltests.custom_managers.models import PersonManager
->>> isinstance(b2.authors, PersonManager)
-True
-
->>> Book.published_objects.all()
-[<Book: How to program>]
-
->>> c1 = Car(name='Corvette', mileage=21, top_speed=180)
->>> c1.save()
->>> c2 = Car(name='Neon', mileage=31, top_speed=100)
->>> c2.save()
->>> Car.cars.order_by('name')
-[<Car: Corvette>, <Car: Neon>]
->>> Car.fast_cars.all()
-[<Car: Corvette>]
-
-# Each model class gets a "_default_manager" attribute, which is a reference
-# to the first manager defined in the class. In this case, it's "cars".
->>> Car._default_manager.order_by('name')
-[<Car: Corvette>, <Car: Neon>]
-"""}
--- a/thirdparty/google_appengine/lib/django/tests/modeltests/custom_methods/models.py	Mon Sep 07 20:26:39 2009 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,58 +0,0 @@
-"""
-3. Giving models custom methods
-
-Any method you add to a model will be available to instances.
-"""
-
-from django.db import models
-import datetime
-
-class Article(models.Model):
-    headline = models.CharField(maxlength=100)
-    pub_date = models.DateField()
-
-    def __str__(self):
-        return self.headline
-
-    def was_published_today(self):
-        return self.pub_date == datetime.date.today()
-
-    def articles_from_same_day_1(self):
-        return Article.objects.filter(pub_date=self.pub_date).exclude(id=self.id)
-
-    def articles_from_same_day_2(self):
-        """
-        Verbose version of get_articles_from_same_day_1, which does a custom
-        database query for the sake of demonstration.
-        """
-        from django.db import connection
-        cursor = connection.cursor()
-        cursor.execute("""
-            SELECT id, headline, pub_date
-            FROM custom_methods_article
-            WHERE pub_date = %s
-                AND id != %s""", [str(self.pub_date), self.id])
-        # The asterisk in "(*row)" tells Python to expand the list into
-        # positional arguments to Article().
-        return [self.__class__(*row) for row in cursor.fetchall()]
-
-__test__ = {'API_TESTS':"""
-# Create a couple of Articles.
->>> from datetime import date
->>> a = Article(id=None, headline='Area man programs in Python', pub_date=date(2005, 7, 27))
->>> a.save()
->>> b = Article(id=None, headline='Beatles reunite', pub_date=date(2005, 7, 27))
->>> b.save()
-
-# Test the custom methods.
->>> a.was_published_today()
-False
->>> a.articles_from_same_day_1()
-[<Article: Beatles reunite>]
->>> a.articles_from_same_day_2()
-[<Article: Beatles reunite>]
->>> b.articles_from_same_day_1()
-[<Article: Area man programs in Python>]
->>> b.articles_from_same_day_2()
-[<Article: Area man programs in Python>]
-"""}
--- a/thirdparty/google_appengine/lib/django/tests/modeltests/custom_pk/models.py	Mon Sep 07 20:26:39 2009 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,95 +0,0 @@
-"""
-14. Using a custom primary key
-
-By default, Django adds an ``"id"`` field to each model. But you can override
-this behavior by explicitly adding ``primary_key=True`` to a field.
-"""
-
-from django.db import models
-
-class Employee(models.Model):
-    employee_code = models.CharField(maxlength=10, primary_key=True,
-            db_column = 'code')
-    first_name = models.CharField(maxlength=20)
-    last_name = models.CharField(maxlength=20)
-    class Meta:
-        ordering = ('last_name', 'first_name')
-
-    def __str__(self):
-        return "%s %s" % (self.first_name, self.last_name)
-
-class Business(models.Model):
-    name = models.CharField(maxlength=20, primary_key=True)
-    employees = models.ManyToManyField(Employee)
-    class Meta:
-        verbose_name_plural = 'businesses'
-
-    def __str__(self):
-        return self.name
-
-__test__ = {'API_TESTS':"""
->>> dan = Employee(employee_code='ABC123', first_name='Dan', last_name='Jones')
->>> dan.save()
->>> Employee.objects.all()
-[<Employee: Dan Jones>]
-
->>> fran = Employee(employee_code='XYZ456', first_name='Fran', last_name='Bones')
->>> fran.save()
->>> Employee.objects.all()
-[<Employee: Fran Bones>, <Employee: Dan Jones>]
-
->>> Employee.objects.get(pk='ABC123')
-<Employee: Dan Jones>
->>> Employee.objects.get(pk='XYZ456')
-<Employee: Fran Bones>
->>> Employee.objects.get(pk='foo')
-Traceback (most recent call last):
-    ...
-DoesNotExist: Employee matching query does not exist.
-
-# Use the name of the primary key, rather than pk.
->>> Employee.objects.get(employee_code__exact='ABC123')
-<Employee: Dan Jones>
-
-# pk can be used as a substitute for the primary key.
->>> Employee.objects.filter(pk__in=['ABC123','XYZ456'])
-[<Employee: Fran Bones>, <Employee: Dan Jones>]
-
-# Fran got married and changed her last name.
->>> fran = Employee.objects.get(pk='XYZ456')
->>> fran.last_name = 'Jones'
->>> fran.save()
->>> Employee.objects.filter(last_name__exact='Jones')
-[<Employee: Dan Jones>, <Employee: Fran Jones>]
->>> Employee.objects.in_bulk(['ABC123', 'XYZ456'])
-{'XYZ456': <Employee: Fran Jones>, 'ABC123': <Employee: Dan Jones>}
-
->>> b = Business(name='Sears')
->>> b.save()
->>> b.employees.add(dan, fran)
->>> b.employees.all()
-[<Employee: Dan Jones>, <Employee: Fran Jones>]
->>> fran.business_set.all()
-[<Business: Sears>]
->>> Business.objects.in_bulk(['Sears'])
-{'Sears': <Business: Sears>}
-
->>> Business.objects.filter(name__exact='Sears')
-[<Business: Sears>]
->>> Business.objects.filter(pk='Sears')
-[<Business: Sears>]
-
-# Queries across tables, involving primary key
->>> Employee.objects.filter(business__name__exact='Sears')
-[<Employee: Dan Jones>, <Employee: Fran Jones>]
->>> Employee.objects.filter(business__pk='Sears')
-[<Employee: Dan Jones>, <Employee: Fran Jones>]
-
->>> Business.objects.filter(employees__employee_code__exact='ABC123')
-[<Business: Sears>]
->>> Business.objects.filter(employees__pk='ABC123')
-[<Business: Sears>]
->>> Business.objects.filter(employees__first_name__startswith='Fran')
-[<Business: Sears>]
-
-"""}
--- a/thirdparty/google_appengine/lib/django/tests/modeltests/empty/models.py	Mon Sep 07 20:26:39 2009 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,26 +0,0 @@
-"""
-39. Empty model tests
-
-These test that things behave sensibly for the rare corner-case of a model with
-no fields.
-"""
-
-from django.db import models
-
-class Empty(models.Model):
-    pass
-
-__test__ = {'API_TESTS':"""
->>> m = Empty()
->>> m.id
->>> m.save()
->>> m2 = Empty()
->>> m2.save()
->>> len(Empty.objects.all())
-2
->>> m.id is not None
-True
->>> existing = Empty(m.id)
->>> existing.save()
-
-"""}
--- a/thirdparty/google_appengine/lib/django/tests/modeltests/field_defaults/models.py	Mon Sep 07 20:26:39 2009 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,51 +0,0 @@
-"""
-32. Callable defaults
-
-You can pass callable objects as the ``default`` parameter to a field. When
-the object is created without an explicit value passed in, Django will call
-the method to determine the default value.
-
-This example uses ``datetime.datetime.now`` as the default for the ``pub_date``
-field.
-"""
-
-from django.db import models
-from datetime import datetime
-
-class Article(models.Model):
-    headline = models.CharField(maxlength=100, default='Default headline')
-    pub_date = models.DateTimeField(default=datetime.now)
-
-    def __str__(self):
-        return self.headline
-
-__test__ = {'API_TESTS':"""
->>> from datetime import datetime
-
-# No articles are in the system yet.
->>> Article.objects.all()
-[]
-
-# Create an Article.
->>> a = Article(id=None)
-
-# Grab the current datetime it should be very close to the default that just
-# got saved as a.pub_date
->>> now = datetime.now()
-
-# Save it into the database. You have to call save() explicitly.
->>> a.save()
-
-# Now it has an ID. Note it's a long integer, as designated by the trailing "L".
->>> a.id
-1L
-
-# Access database columns via Python attributes.
->>> a.headline
-'Default headline'
-
-# make sure the two dates are sufficiently close
->>> d = now - a.pub_date
->>> d.seconds < 5
-True
-"""}
--- a/thirdparty/google_appengine/lib/django/tests/modeltests/fixtures/__init__.py	Mon Sep 07 20:26:39 2009 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,2 +0,0 @@
-
-
--- a/thirdparty/google_appengine/lib/django/tests/modeltests/fixtures/fixtures/fixture1.json	Mon Sep 07 20:26:39 2009 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,18 +0,0 @@
-[
-    {
-        "pk": "2", 
-        "model": "fixtures.article", 
-        "fields": {
-            "headline": "Poker has no place on ESPN", 
-            "pub_date": "2006-06-16 12:00:00"
-        }
-    }, 
-    {
-        "pk": "3", 
-        "model": "fixtures.article", 
-        "fields": {
-            "headline": "Time to reform copyright", 
-            "pub_date": "2006-06-16 13:00:00"
-        }
-    }
-]
\ No newline at end of file
--- a/thirdparty/google_appengine/lib/django/tests/modeltests/fixtures/fixtures/fixture2.json	Mon Sep 07 20:26:39 2009 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,18 +0,0 @@
-[
-    {
-        "pk": "3", 
-        "model": "fixtures.article", 
-        "fields": {
-            "headline": "Copyright is fine the way it is", 
-            "pub_date": "2006-06-16 14:00:00"
-        }
-    }, 
-    {
-        "pk": "4", 
-        "model": "fixtures.article", 
-        "fields": {
-            "headline": "Django conquers world!", 
-            "pub_date": "2006-06-16 15:00:00"
-        }
-    }
-]
\ No newline at end of file
--- a/thirdparty/google_appengine/lib/django/tests/modeltests/fixtures/fixtures/fixture2.xml	Mon Sep 07 20:26:39 2009 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,11 +0,0 @@
-<?xml version="1.0" encoding="utf-8"?>
-<django-objects version="1.0">
-    <object pk="2" model="fixtures.article">
-        <field type="CharField" name="headline">Poker on TV is great!</field>
-        <field type="DateTimeField" name="pub_date">2006-06-16 11:00:00</field>
-    </object>
-    <object pk="5" model="fixtures.article">
-        <field type="CharField" name="headline">XML identified as leading cause of cancer</field>
-        <field type="DateTimeField" name="pub_date">2006-06-16 16:00:00</field>
-    </object>
-</django-objects>
\ No newline at end of file
--- a/thirdparty/google_appengine/lib/django/tests/modeltests/fixtures/fixtures/fixture3.xml	Mon Sep 07 20:26:39 2009 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,11 +0,0 @@
-<?xml version="1.0" encoding="utf-8"?>
-<django-objects version="1.0">
-    <object pk="2" model="fixtures.article">
-        <field type="CharField" name="headline">Poker on TV is great!</field>
-        <field type="DateTimeField" name="pub_date">2006-06-16 11:00:00</field>
-    </object>
-    <object pk="5" model="fixtures.article">
-        <field type="CharField" name="headline">XML identified as leading cause of cancer</field>
-        <field type="DateTimeField" name="pub_date">2006-06-16 16:00:00</field>
-    </object>
-</django-objects>
\ No newline at end of file
--- a/thirdparty/google_appengine/lib/django/tests/modeltests/fixtures/fixtures/initial_data.json	Mon Sep 07 20:26:39 2009 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,10 +0,0 @@
-[
-    {
-        "pk": "1", 
-        "model": "fixtures.article", 
-        "fields": {
-            "headline": "Python program becomes self aware", 
-            "pub_date": "2006-06-16 11:00:00"
-        }
-    }
-]
\ No newline at end of file
--- a/thirdparty/google_appengine/lib/django/tests/modeltests/fixtures/models.py	Mon Sep 07 20:26:39 2009 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,88 +0,0 @@
-"""
-37. Fixtures.
-
-Fixtures are a way of loading data into the database in bulk. Fixure data 
-can be stored in any serializable format (including JSON and XML). Fixtures 
-are identified by name, and are stored in either a directory named 'fixtures'
-in the application directory, on in one of the directories named in the 
-FIXTURE_DIRS setting.
-"""
-
-from django.db import models
-
-class Article(models.Model):
-    headline = models.CharField(maxlength=100, default='Default headline')
-    pub_date = models.DateTimeField()
-
-    def __str__(self):
-        return self.headline
-        
-    class Meta:
-        ordering = ('-pub_date', 'headline')
-        
-__test__ = {'API_TESTS': """
->>> from django.core import management
->>> from django.db.models import get_app
-
-# Reset the database representation of this app. 
-# This will return the database to a clean initial state.
->>> management.flush(verbosity=0, interactive=False)
-
-# Syncdb introduces 1 initial data object from initial_data.json.
->>> Article.objects.all()
-[<Article: Python program becomes self aware>]
-
-# Load fixture 1. Single JSON file, with two objects.
->>> management.load_data(['fixture1.json'], verbosity=0)
->>> Article.objects.all()
-[<Article: Time to reform copyright>, <Article: Poker has no place on ESPN>, <Article: Python program becomes self aware>]
-
-# Load fixture 2. JSON file imported by default. Overwrites some existing objects
->>> management.load_data(['fixture2.json'], verbosity=0)
->>> Article.objects.all()
-[<Article: Django conquers world!>, <Article: Copyright is fine the way it is>, <Article: Poker has no place on ESPN>, <Article: Python program becomes self aware>]
-
-# Load fixture 3, XML format. 
->>> management.load_data(['fixture3.xml'], verbosity=0)
->>> Article.objects.all()
-[<Article: XML identified as leading cause of cancer>, <Article: Django conquers world!>, <Article: Copyright is fine the way it is>, <Article: Poker on TV is great!>, <Article: Python program becomes self aware>]
-
-# Load a fixture that doesn't exist
->>> management.load_data(['unknown.json'], verbosity=0)
-
-# object list is unaffected
->>> Article.objects.all()
-[<Article: XML identified as leading cause of cancer>, <Article: Django conquers world!>, <Article: Copyright is fine the way it is>, <Article: Poker on TV is great!>, <Article: Python program becomes self aware>]
-
-# Reset the database representation of this app. This will delete all data.
->>> management.flush(verbosity=0, interactive=False)
->>> Article.objects.all()
-[<Article: Python program becomes self aware>]
-
-# Load fixture 1 again, using format discovery
->>> management.load_data(['fixture1'], verbosity=0)
->>> Article.objects.all()
-[<Article: Time to reform copyright>, <Article: Poker has no place on ESPN>, <Article: Python program becomes self aware>]
-
-# Try to load fixture 2 using format discovery; this will fail
-# because there are two fixture2's in the fixtures directory 
->>> management.load_data(['fixture2'], verbosity=0) # doctest: +ELLIPSIS
-Multiple fixtures named 'fixture2' in '...fixtures'. Aborting.
-
->>> Article.objects.all()
-[<Article: Time to reform copyright>, <Article: Poker has no place on ESPN>, <Article: Python program becomes self aware>]
-
-# Dump the current contents of the database as a JSON fixture
->>> print management.dump_data(['fixtures'], format='json')
-[{"pk": "3", "model": "fixtures.article", "fields": {"headline": "Time to reform copyright", "pub_date": "2006-06-16 13:00:00"}}, {"pk": "2", "model": "fixtures.article", "fields": {"headline": "Poker has no place on ESPN", "pub_date": "2006-06-16 12:00:00"}}, {"pk": "1", "model": "fixtures.article", "fields": {"headline": "Python program becomes self aware", "pub_date": "2006-06-16 11:00:00"}}]
-"""}
-
-from django.test import TestCase
-
-class SampleTestCase(TestCase):
-    fixtures = ['fixture1.json', 'fixture2.json']
-        
-    def testClassFixtures(self):
-        "Check that test case has installed 4 fixture objects"
-        self.assertEqual(Article.objects.count(), 4)
-        self.assertEquals(str(Article.objects.all()), "[<Article: Django conquers world!>, <Article: Copyright is fine the way it is>, <Article: Poker has no place on ESPN>, <Article: Python program becomes self aware>]")
--- a/thirdparty/google_appengine/lib/django/tests/modeltests/generic_relations/models.py	Mon Sep 07 20:26:39 2009 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,134 +0,0 @@
-"""
-34. Generic relations
-
-Generic relations let an object have a foreign key to any object through a
-content-type/object-id field. A generic foreign key can point to any object,
-be it animal, vegetable, or mineral.
-
-The canonical example is tags (although this example implementation is *far*
-from complete).
-"""
-
-from django.db import models
-from django.contrib.contenttypes.models import ContentType
-
-class TaggedItem(models.Model):
-    """A tag on an item."""
-    tag = models.SlugField()
-    content_type = models.ForeignKey(ContentType)
-    object_id = models.PositiveIntegerField()
-    
-    content_object = models.GenericForeignKey()
-    
-    class Meta:
-        ordering = ["tag"]
-    
-    def __str__(self):
-        return self.tag
-
-class Animal(models.Model):
-    common_name = models.CharField(maxlength=150)
-    latin_name = models.CharField(maxlength=150)
-    
-    tags = models.GenericRelation(TaggedItem)
-
-    def __str__(self):
-        return self.common_name
-        
-class Vegetable(models.Model):
-    name = models.CharField(maxlength=150)
-    is_yucky = models.BooleanField(default=True)
-    
-    tags = models.GenericRelation(TaggedItem)
-    
-    def __str__(self):
-        return self.name
-    
-class Mineral(models.Model):
-    name = models.CharField(maxlength=150)
-    hardness = models.PositiveSmallIntegerField()
-    
-    # note the lack of an explicit GenericRelation here...
-    
-    def __str__(self):
-        return self.name
-        
-__test__ = {'API_TESTS':"""
-# Create the world in 7 lines of code...
->>> lion = Animal(common_name="Lion", latin_name="Panthera leo")
->>> platypus = Animal(common_name="Platypus", latin_name="Ornithorhynchus anatinus")
->>> eggplant = Vegetable(name="Eggplant", is_yucky=True)
->>> bacon = Vegetable(name="Bacon", is_yucky=False)
->>> quartz = Mineral(name="Quartz", hardness=7)
->>> for o in (lion, platypus, eggplant, bacon, quartz):
-...     o.save()
-
-# Objects with declared GenericRelations can be tagged directly -- the API
-# mimics the many-to-many API.
->>> bacon.tags.create(tag="fatty")
-<TaggedItem: fatty>
->>> bacon.tags.create(tag="salty")
-<TaggedItem: salty>
->>> lion.tags.create(tag="yellow")
-<TaggedItem: yellow>
->>> lion.tags.create(tag="hairy")
-<TaggedItem: hairy>
-
->>> lion.tags.all()
-[<TaggedItem: hairy>, <TaggedItem: yellow>]
->>> bacon.tags.all()
-[<TaggedItem: fatty>, <TaggedItem: salty>]
-
-# You can easily access the content object like a foreign key.
->>> t = TaggedItem.objects.get(tag="salty")
->>> t.content_object
-<Vegetable: Bacon>
-
-# Recall that the Mineral class doesn't have an explicit GenericRelation
-# defined. That's OK, because you can create TaggedItems explicitly.
->>> tag1 = TaggedItem(content_object=quartz, tag="shiny")
->>> tag2 = TaggedItem(content_object=quartz, tag="clearish")
->>> tag1.save()
->>> tag2.save()
-
-# However, excluding GenericRelations means your lookups have to be a bit more
-# explicit.
->>> from django.contrib.contenttypes.models import ContentType
->>> ctype = ContentType.objects.get_for_model(quartz)
->>> TaggedItem.objects.filter(content_type__pk=ctype.id, object_id=quartz.id)
-[<TaggedItem: clearish>, <TaggedItem: shiny>]
-
-# You can set a generic foreign key in the way you'd expect.
->>> tag1.content_object = platypus
->>> tag1.save()
->>> platypus.tags.all()
-[<TaggedItem: shiny>]
->>> TaggedItem.objects.filter(content_type__pk=ctype.id, object_id=quartz.id)
-[<TaggedItem: clearish>]
-
-# If you delete an object with an explicit Generic relation, the related
-# objects are deleted when the source object is deleted.
-# Original list of tags:
->>> [(t.tag, t.content_type, t.object_id) for t in TaggedItem.objects.all()]
-[('clearish', <ContentType: mineral>, 1), ('fatty', <ContentType: vegetable>, 2), ('hairy', <ContentType: animal>, 1), ('salty', <ContentType: vegetable>, 2), ('shiny', <ContentType: animal>, 2), ('yellow', <ContentType: animal>, 1)]
-
->>> lion.delete()
->>> [(t.tag, t.content_type, t.object_id) for t in TaggedItem.objects.all()]
-[('clearish', <ContentType: mineral>, 1), ('fatty', <ContentType: vegetable>, 2), ('salty', <ContentType: vegetable>, 2), ('shiny', <ContentType: animal>, 2)]
-
-# If Generic Relation is not explicitly defined, any related objects 
-# remain after deletion of the source object.
->>> quartz.delete()
->>> [(t.tag, t.content_type, t.object_id) for t in TaggedItem.objects.all()]
-[('clearish', <ContentType: mineral>, 1), ('fatty', <ContentType: vegetable>, 2), ('salty', <ContentType: vegetable>, 2), ('shiny', <ContentType: animal>, 2)]
-
-# If you delete a tag, the objects using the tag are unaffected 
-# (other than losing a tag)
->>> tag = TaggedItem.objects.get(id=1)
->>> tag.delete()
->>> bacon.tags.all()
-[<TaggedItem: salty>]
->>> [(t.tag, t.content_type, t.object_id) for t in TaggedItem.objects.all()]
-[('clearish', <ContentType: mineral>, 1), ('salty', <ContentType: vegetable>, 2), ('shiny', <ContentType: animal>, 2)]
-
-"""}
--- a/thirdparty/google_appengine/lib/django/tests/modeltests/get_latest/models.py	Mon Sep 07 20:26:39 2009 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,79 +0,0 @@
-"""
-8. get_latest_by
-
-Models can have a ``get_latest_by`` attribute, which should be set to the name
-of a DateField or DateTimeField. If ``get_latest_by`` exists, the model's
-manager will get a ``latest()`` method, which will return the latest object in
-the database according to that field. "Latest" means "having the date farthest
-into the future."
-"""
-
-from django.db import models
-
-class Article(models.Model):
-    headline = models.CharField(maxlength=100)
-    pub_date = models.DateField()
-    expire_date = models.DateField()
-    class Meta:
-        get_latest_by = 'pub_date'
-
-    def __str__(self):
-        return self.headline
-
-class Person(models.Model):
-    name = models.CharField(maxlength=30)
-    birthday = models.DateField()
-
-    # Note that this model doesn't have "get_latest_by" set.
-
-    def __str__(self):
-        return self.name
-
-__test__ = {'API_TESTS':"""
-# Because no Articles exist yet, latest() raises ArticleDoesNotExist.
->>> Article.objects.latest()
-Traceback (most recent call last):
-    ...
-DoesNotExist: Article matching query does not exist.
-
-# Create a couple of Articles.
->>> from datetime import datetime
->>> a1 = Article(headline='Article 1', pub_date=datetime(2005, 7, 26), expire_date=datetime(2005, 9, 1))
->>> a1.save()
->>> a2 = Article(headline='Article 2', pub_date=datetime(2005, 7, 27), expire_date=datetime(2005, 7, 28))
->>> a2.save()
->>> a3 = Article(headline='Article 3', pub_date=datetime(2005, 7, 27), expire_date=datetime(2005, 8, 27))
->>> a3.save()
->>> a4 = Article(headline='Article 4', pub_date=datetime(2005, 7, 28), expire_date=datetime(2005, 7, 30))
->>> a4.save()
-
-# Get the latest Article.
->>> Article.objects.latest()
-<Article: Article 4>
-
-# Get the latest Article that matches certain filters.
->>> Article.objects.filter(pub_date__lt=datetime(2005, 7, 27)).latest()
-<Article: Article 1>
-
-# Pass a custom field name to latest() to change the field that's used to
-# determine the latest object.
->>> Article.objects.latest('expire_date')
-<Article: Article 1>
-
->>> Article.objects.filter(pub_date__gt=datetime(2005, 7, 26)).latest('expire_date')
-<Article: Article 3>
-
-# You can still use latest() with a model that doesn't have "get_latest_by"
-# set -- just pass in the field name manually.
->>> p1 = Person(name='Ralph', birthday=datetime(1950, 1, 1))
->>> p1.save()
->>> p2 = Person(name='Stephanie', birthday=datetime(1960, 2, 3))
->>> p2.save()
->>> Person.objects.latest()
-Traceback (most recent call last):
-    ...
-AssertionError: latest() requires either a field_name parameter or 'get_latest_by' in the model
-
->>> Person.objects.latest('birthday')
-<Person: Stephanie>
-"""}
--- a/thirdparty/google_appengine/lib/django/tests/modeltests/get_object_or_404/models.py	Mon Sep 07 20:26:39 2009 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,86 +0,0 @@
-"""
-35. DB-API Shortcuts
-
-get_object_or_404 is a shortcut function to be used in view functions for
-performing a get() lookup and raising a Http404 exception if a DoesNotExist
-exception was rasied during the get() call.
-
-get_list_or_404 is a shortcut function to be used in view functions for
-performing a filter() lookup and raising a Http404 exception if a DoesNotExist
-exception was rasied during the filter() call.
-"""
-
-from django.db import models
-from django.http import Http404
-from django.shortcuts import get_object_or_404, get_list_or_404
-
-class Author(models.Model):
-    name = models.CharField(maxlength=50)
-    
-    def __str__(self):
-        return self.name
-
-class ArticleManager(models.Manager):
-    def get_query_set(self):
-        return super(ArticleManager, self).get_query_set().filter(authors__name__icontains='sir')
-
-class Article(models.Model):
-    authors = models.ManyToManyField(Author)
-    title = models.CharField(maxlength=50)
-    objects = models.Manager()
-    by_a_sir = ArticleManager()
-    
-    def __str__(self):
-        return self.title
-
-__test__ = {'API_TESTS':"""
-# Create some Authors.
->>> a = Author.objects.create(name="Brave Sir Robin")
->>> a.save()
->>> a2 = Author.objects.create(name="Patsy")
->>> a2.save()
-
-# No Articles yet, so we should get a Http404 error.
->>> get_object_or_404(Article, title="Foo")
-Traceback (most recent call last):
-...
-Http404: No Article matches the given query.
-
-# Create an Article.
->>> article = Article.objects.create(title="Run away!")
->>> article.authors = [a, a2]
->>> article.save()
-
-# get_object_or_404 can be passed a Model to query.
->>> get_object_or_404(Article, title__contains="Run")
-<Article: Run away!>
-
-# We can also use the the Article manager through an Author object.
->>> get_object_or_404(a.article_set, title__contains="Run")
-<Article: Run away!>
-
-# No articles containing "Camelot".  This should raise a Http404 error.
->>> get_object_or_404(a.article_set, title__contains="Camelot")
-Traceback (most recent call last):
-...
-Http404: No Article matches the given query.
-
-# Custom managers can be used too.
->>> get_object_or_404(Article.by_a_sir, title="Run away!")
-<Article: Run away!>
-
-# get_list_or_404 can be used to get lists of objects
->>> get_list_or_404(a.article_set, title__icontains='Run')
-[<Article: Run away!>]
-
-# Http404 is returned if the list is empty
->>> get_list_or_404(a.article_set, title__icontains='Shrubbery')
-Traceback (most recent call last):
-...
-Http404: No Article matches the given query.
-
-# Custom managers can be used too.
->>> get_list_or_404(Article.by_a_sir, title__icontains="Run")
-[<Article: Run away!>]
-
-"""}
--- a/thirdparty/google_appengine/lib/django/tests/modeltests/get_or_create/models.py	Mon Sep 07 20:26:39 2009 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,52 +0,0 @@
-"""
-33. get_or_create()
-
-get_or_create() does what it says: it tries to look up an object with the given
-parameters. If an object isn't found, it creates one with the given parameters.
-"""
-
-from django.db import models
-
-class Person(models.Model):
-    first_name = models.CharField(maxlength=100)
-    last_name = models.CharField(maxlength=100)
-    birthday = models.DateField()
-
-    def __str__(self):
-        return '%s %s' % (self.first_name, self.last_name)
-
-__test__ = {'API_TESTS':"""
-# Acting as a divine being, create an Person.
->>> from datetime import date
->>> p = Person(first_name='John', last_name='Lennon', birthday=date(1940, 10, 9))
->>> p.save()
-
-# Only one Person is in the database at this point.
->>> Person.objects.count()
-1
-
-# get_or_create() a person with similar first names.
->>> p, created = Person.objects.get_or_create(first_name='John', last_name='Lennon', defaults={'birthday': date(1940, 10, 9)})
-
-# get_or_create() didn't have to create an object.
->>> created
-False
-
-# There's still only one Person in the database.
->>> Person.objects.count()
-1
-
-# get_or_create() a Person with a different name.
->>> p, created = Person.objects.get_or_create(first_name='George', last_name='Harrison', defaults={'birthday': date(1943, 2, 25)})
->>> created
-True
->>> Person.objects.count()
-2
-
-# If we execute the exact same statement, it won't create a Person.
->>> p, created = Person.objects.get_or_create(first_name='George', last_name='Harrison', defaults={'birthday': date(1943, 2, 25)})
->>> created
-False
->>> Person.objects.count()
-2
-"""}
--- a/thirdparty/google_appengine/lib/django/tests/modeltests/invalid_models/__init__.py	Mon Sep 07 20:26:39 2009 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,1 +0,0 @@
-
--- a/thirdparty/google_appengine/lib/django/tests/modeltests/invalid_models/models.py	Mon Sep 07 20:26:39 2009 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,182 +0,0 @@
-"""
-26. Invalid models
-
-This example exists purely to point out errors in models.
-"""
-
-from django.db import models
-
-class FieldErrors(models.Model):
-    charfield = models.CharField()
-    floatfield = models.FloatField()
-    filefield = models.FileField()
-    prepopulate = models.CharField(maxlength=10, prepopulate_from='bad')
-    choices = models.CharField(maxlength=10, choices='bad')
-    choices2 = models.CharField(maxlength=10, choices=[(1,2,3),(1,2,3)])
-    index = models.CharField(maxlength=10, db_index='bad')
-
-class Target(models.Model):
-    tgt_safe = models.CharField(maxlength=10)
-    clash1 = models.CharField(maxlength=10)
-    clash2 = models.CharField(maxlength=10)
-
-    clash1_set = models.CharField(maxlength=10)
-
-class Clash1(models.Model):
-    src_safe = models.CharField(maxlength=10, core=True)
-
-    foreign = models.ForeignKey(Target)
-    m2m = models.ManyToManyField(Target)
-
-class Clash2(models.Model):
-    src_safe = models.CharField(maxlength=10, core=True)
-
-    foreign_1 = models.ForeignKey(Target, related_name='id')
-    foreign_2 = models.ForeignKey(Target, related_name='src_safe')
-
-    m2m_1 = models.ManyToManyField(Target, related_name='id')
-    m2m_2 = models.ManyToManyField(Target, related_name='src_safe')
-
-class Target2(models.Model):
-    clash3 = models.CharField(maxlength=10)
-    foreign_tgt = models.ForeignKey(Target)
-    clashforeign_set = models.ForeignKey(Target)
-
-    m2m_tgt = models.ManyToManyField(Target)
-    clashm2m_set = models.ManyToManyField(Target)
-
-class Clash3(models.Model):
-    src_safe = models.CharField(maxlength=10, core=True)
-    
-    foreign_1 = models.ForeignKey(Target2, related_name='foreign_tgt')
-    foreign_2 = models.ForeignKey(Target2, related_name='m2m_tgt')
-
-    m2m_1 = models.ManyToManyField(Target2, related_name='foreign_tgt')
-    m2m_2 = models.ManyToManyField(Target2, related_name='m2m_tgt')
-
-class ClashForeign(models.Model):
-    foreign = models.ForeignKey(Target2)
-
-class ClashM2M(models.Model):
-    m2m = models.ManyToManyField(Target2)
-
-class SelfClashForeign(models.Model):
-    src_safe = models.CharField(maxlength=10, core=True)
-    selfclashforeign = models.CharField(maxlength=10)
-
-    selfclashforeign_set = models.ForeignKey("SelfClashForeign")
-    foreign_1 = models.ForeignKey("SelfClashForeign", related_name='id')
-    foreign_2 = models.ForeignKey("SelfClashForeign", related_name='src_safe')
-
-class ValidM2M(models.Model):
-    src_safe = models.CharField(maxlength=10)
-    validm2m = models.CharField(maxlength=10)
-
-    # M2M fields are symmetrical by default. Symmetrical M2M fields
-    # on self don't require a related accessor, so many potential
-    # clashes are avoided.
-    validm2m_set = models.ManyToManyField("ValidM2M")
-    
-    m2m_1 = models.ManyToManyField("ValidM2M", related_name='id')
-    m2m_2 = models.ManyToManyField("ValidM2M", related_name='src_safe')
-
-    m2m_3 = models.ManyToManyField('self')
-    m2m_4 = models.ManyToManyField('self')
-
-class SelfClashM2M(models.Model):
-    src_safe = models.CharField(maxlength=10)
-    selfclashm2m = models.CharField(maxlength=10)
-
-    # Non-symmetrical M2M fields _do_ have related accessors, so 
-    # there is potential for clashes.
-    selfclashm2m_set = models.ManyToManyField("SelfClashM2M", symmetrical=False)
-    
-    m2m_1 = models.ManyToManyField("SelfClashM2M", related_name='id', symmetrical=False)
-    m2m_2 = models.ManyToManyField("SelfClashM2M", related_name='src_safe', symmetrical=False)
-
-    m2m_3 = models.ManyToManyField('self', symmetrical=False)
-    m2m_4 = models.ManyToManyField('self', symmetrical=False)
-
-model_errors = """invalid_models.fielderrors: "charfield": CharFields require a "maxlength" attribute.
-invalid_models.fielderrors: "floatfield": FloatFields require a "decimal_places" attribute.
-invalid_models.fielderrors: "floatfield": FloatFields require a "max_digits" attribute.
-invalid_models.fielderrors: "filefield": FileFields require an "upload_to" attribute.
-invalid_models.fielderrors: "prepopulate": prepopulate_from should be a list or tuple.
-invalid_models.fielderrors: "choices": "choices" should be iterable (e.g., a tuple or list).
-invalid_models.fielderrors: "choices2": "choices" should be a sequence of two-tuples.
-invalid_models.fielderrors: "choices2": "choices" should be a sequence of two-tuples.
-invalid_models.fielderrors: "index": "db_index" should be either None, True or False.
-invalid_models.clash1: Accessor for field 'foreign' clashes with field 'Target.clash1_set'. Add a related_name argument to the definition for 'foreign'.
-invalid_models.clash1: Accessor for field 'foreign' clashes with related m2m field 'Target.clash1_set'. Add a related_name argument to the definition for 'foreign'.
-invalid_models.clash1: Reverse query name for field 'foreign' clashes with field 'Target.clash1'. Add a related_name argument to the definition for 'foreign'.
-invalid_models.clash1: Accessor for m2m field 'm2m' clashes with field 'Target.clash1_set'. Add a related_name argument to the definition for 'm2m'.
-invalid_models.clash1: Accessor for m2m field 'm2m' clashes with related field 'Target.clash1_set'. Add a related_name argument to the definition for 'm2m'.
-invalid_models.clash1: Reverse query name for m2m field 'm2m' clashes with field 'Target.clash1'. Add a related_name argument to the definition for 'm2m'.
-invalid_models.clash2: Accessor for field 'foreign_1' clashes with field 'Target.id'. Add a related_name argument to the definition for 'foreign_1'.
-invalid_models.clash2: Accessor for field 'foreign_1' clashes with related m2m field 'Target.id'. Add a related_name argument to the definition for 'foreign_1'.
-invalid_models.clash2: Reverse query name for field 'foreign_1' clashes with field 'Target.id'. Add a related_name argument to the definition for 'foreign_1'.
-invalid_models.clash2: Reverse query name for field 'foreign_1' clashes with related m2m field 'Target.id'. Add a related_name argument to the definition for 'foreign_1'.
-invalid_models.clash2: Accessor for field 'foreign_2' clashes with related m2m field 'Target.src_safe'. Add a related_name argument to the definition for 'foreign_2'.
-invalid_models.clash2: Reverse query name for field 'foreign_2' clashes with related m2m field 'Target.src_safe'. Add a related_name argument to the definition for 'foreign_2'.
-invalid_models.clash2: Accessor for m2m field 'm2m_1' clashes with field 'Target.id'. Add a related_name argument to the definition for 'm2m_1'.
-invalid_models.clash2: Accessor for m2m field 'm2m_1' clashes with related field 'Target.id'. Add a related_name argument to the definition for 'm2m_1'.
-invalid_models.clash2: Reverse query name for m2m field 'm2m_1' clashes with field 'Target.id'. Add a related_name argument to the definition for 'm2m_1'.
-invalid_models.clash2: Reverse query name for m2m field 'm2m_1' clashes with related field 'Target.id'. Add a related_name argument to the definition for 'm2m_1'.
-invalid_models.clash2: Accessor for m2m field 'm2m_2' clashes with related field 'Target.src_safe'. Add a related_name argument to the definition for 'm2m_2'.
-invalid_models.clash2: Reverse query name for m2m field 'm2m_2' clashes with related field 'Target.src_safe'. Add a related_name argument to the definition for 'm2m_2'.
-invalid_models.clash3: Accessor for field 'foreign_1' clashes with field 'Target2.foreign_tgt'. Add a related_name argument to the definition for 'foreign_1'.
-invalid_models.clash3: Accessor for field 'foreign_1' clashes with related m2m field 'Target2.foreign_tgt'. Add a related_name argument to the definition for 'foreign_1'.
-invalid_models.clash3: Reverse query name for field 'foreign_1' clashes with field 'Target2.foreign_tgt'. Add a related_name argument to the definition for 'foreign_1'.
-invalid_models.clash3: Reverse query name for field 'foreign_1' clashes with related m2m field 'Target2.foreign_tgt'. Add a related_name argument to the definition for 'foreign_1'.
-invalid_models.clash3: Accessor for field 'foreign_2' clashes with m2m field 'Target2.m2m_tgt'. Add a related_name argument to the definition for 'foreign_2'.
-invalid_models.clash3: Accessor for field 'foreign_2' clashes with related m2m field 'Target2.m2m_tgt'. Add a related_name argument to the definition for 'foreign_2'.
-invalid_models.clash3: Reverse query name for field 'foreign_2' clashes with m2m field 'Target2.m2m_tgt'. Add a related_name argument to the definition for 'foreign_2'.
-invalid_models.clash3: Reverse query name for field 'foreign_2' clashes with related m2m field 'Target2.m2m_tgt'. Add a related_name argument to the definition for 'foreign_2'.
-invalid_models.clash3: Accessor for m2m field 'm2m_1' clashes with field 'Target2.foreign_tgt'. Add a related_name argument to the definition for 'm2m_1'.
-invalid_models.clash3: Accessor for m2m field 'm2m_1' clashes with related field 'Target2.foreign_tgt'. Add a related_name argument to the definition for 'm2m_1'.
-invalid_models.clash3: Reverse query name for m2m field 'm2m_1' clashes with field 'Target2.foreign_tgt'. Add a related_name argument to the definition for 'm2m_1'.
-invalid_models.clash3: Reverse query name for m2m field 'm2m_1' clashes with related field 'Target2.foreign_tgt'. Add a related_name argument to the definition for 'm2m_1'.
-invalid_models.clash3: Accessor for m2m field 'm2m_2' clashes with m2m field 'Target2.m2m_tgt'. Add a related_name argument to the definition for 'm2m_2'.
-invalid_models.clash3: Accessor for m2m field 'm2m_2' clashes with related field 'Target2.m2m_tgt'. Add a related_name argument to the definition for 'm2m_2'.
-invalid_models.clash3: Reverse query name for m2m field 'm2m_2' clashes with m2m field 'Target2.m2m_tgt'. Add a related_name argument to the definition for 'm2m_2'.
-invalid_models.clash3: Reverse query name for m2m field 'm2m_2' clashes with related field 'Target2.m2m_tgt'. Add a related_name argument to the definition for 'm2m_2'.
-invalid_models.clashforeign: Accessor for field 'foreign' clashes with field 'Target2.clashforeign_set'. Add a related_name argument to the definition for 'foreign'.
-invalid_models.clashm2m: Accessor for m2m field 'm2m' clashes with m2m field 'Target2.clashm2m_set'. Add a related_name argument to the definition for 'm2m'.
-invalid_models.target2: Accessor for field 'foreign_tgt' clashes with related m2m field 'Target.target2_set'. Add a related_name argument to the definition for 'foreign_tgt'.
-invalid_models.target2: Accessor for field 'foreign_tgt' clashes with related m2m field 'Target.target2_set'. Add a related_name argument to the definition for 'foreign_tgt'.
-invalid_models.target2: Accessor for field 'foreign_tgt' clashes with related field 'Target.target2_set'. Add a related_name argument to the definition for 'foreign_tgt'.
-invalid_models.target2: Accessor for field 'clashforeign_set' clashes with related m2m field 'Target.target2_set'. Add a related_name argument to the definition for 'clashforeign_set'.
-invalid_models.target2: Accessor for field 'clashforeign_set' clashes with related m2m field 'Target.target2_set'. Add a related_name argument to the definition for 'clashforeign_set'.
-invalid_models.target2: Accessor for field 'clashforeign_set' clashes with related field 'Target.target2_set'. Add a related_name argument to the definition for 'clashforeign_set'.
-invalid_models.target2: Accessor for m2m field 'm2m_tgt' clashes with related field 'Target.target2_set'. Add a related_name argument to the definition for 'm2m_tgt'.
-invalid_models.target2: Accessor for m2m field 'm2m_tgt' clashes with related field 'Target.target2_set'. Add a related_name argument to the definition for 'm2m_tgt'.
-invalid_models.target2: Accessor for m2m field 'm2m_tgt' clashes with related m2m field 'Target.target2_set'. Add a related_name argument to the definition for 'm2m_tgt'.
-invalid_models.target2: Accessor for m2m field 'm2m_tgt' clashes with related m2m field 'Target.target2_set'. Add a related_name argument to the definition for 'm2m_tgt'.
-invalid_models.target2: Accessor for m2m field 'm2m_tgt' clashes with related m2m field 'Target.target2_set'. Add a related_name argument to the definition for 'm2m_tgt'.
-invalid_models.target2: Accessor for m2m field 'clashm2m_set' clashes with related field 'Target.target2_set'. Add a related_name argument to the definition for 'clashm2m_set'.
-invalid_models.target2: Accessor for m2m field 'clashm2m_set' clashes with related field 'Target.target2_set'. Add a related_name argument to the definition for 'clashm2m_set'.
-invalid_models.target2: Accessor for m2m field 'clashm2m_set' clashes with related m2m field 'Target.target2_set'. Add a related_name argument to the definition for 'clashm2m_set'.
-invalid_models.target2: Accessor for m2m field 'clashm2m_set' clashes with related m2m field 'Target.target2_set'. Add a related_name argument to the definition for 'clashm2m_set'.
-invalid_models.target2: Accessor for m2m field 'clashm2m_set' clashes with related m2m field 'Target.target2_set'. Add a related_name argument to the definition for 'clashm2m_set'.
-invalid_models.selfclashforeign: Accessor for field 'selfclashforeign_set' clashes with field 'SelfClashForeign.selfclashforeign_set'. Add a related_name argument to the definition for 'selfclashforeign_set'.
-invalid_models.selfclashforeign: Reverse query name for field 'selfclashforeign_set' clashes with field 'SelfClashForeign.selfclashforeign'. Add a related_name argument to the definition for 'selfclashforeign_set'.
-invalid_models.selfclashforeign: Accessor for field 'foreign_1' clashes with field 'SelfClashForeign.id'. Add a related_name argument to the definition for 'foreign_1'.
-invalid_models.selfclashforeign: Reverse query name for field 'foreign_1' clashes with field 'SelfClashForeign.id'. Add a related_name argument to the definition for 'foreign_1'.
-invalid_models.selfclashforeign: Accessor for field 'foreign_2' clashes with field 'SelfClashForeign.src_safe'. Add a related_name argument to the definition for 'foreign_2'.
-invalid_models.selfclashforeign: Reverse query name for field 'foreign_2' clashes with field 'SelfClashForeign.src_safe'. Add a related_name argument to the definition for 'foreign_2'.
-invalid_models.selfclashm2m: Accessor for m2m field 'selfclashm2m_set' clashes with m2m field 'SelfClashM2M.selfclashm2m_set'. Add a related_name argument to the definition for 'selfclashm2m_set'.
-invalid_models.selfclashm2m: Reverse query name for m2m field 'selfclashm2m_set' clashes with field 'SelfClashM2M.selfclashm2m'. Add a related_name argument to the definition for 'selfclashm2m_set'.
-invalid_models.selfclashm2m: Accessor for m2m field 'selfclashm2m_set' clashes with related m2m field 'SelfClashM2M.selfclashm2m_set'. Add a related_name argument to the definition for 'selfclashm2m_set'.
-invalid_models.selfclashm2m: Accessor for m2m field 'm2m_1' clashes with field 'SelfClashM2M.id'. Add a related_name argument to the definition for 'm2m_1'.
-invalid_models.selfclashm2m: Accessor for m2m field 'm2m_2' clashes with field 'SelfClashM2M.src_safe'. Add a related_name argument to the definition for 'm2m_2'.
-invalid_models.selfclashm2m: Reverse query name for m2m field 'm2m_1' clashes with field 'SelfClashM2M.id'. Add a related_name argument to the definition for 'm2m_1'.
-invalid_models.selfclashm2m: Reverse query name for m2m field 'm2m_2' clashes with field 'SelfClashM2M.src_safe'. Add a related_name argument to the definition for 'm2m_2'.
-invalid_models.selfclashm2m: Accessor for m2m field 'm2m_3' clashes with m2m field 'SelfClashM2M.selfclashm2m_set'. Add a related_name argument to the definition for 'm2m_3'.
-invalid_models.selfclashm2m: Accessor for m2m field 'm2m_3' clashes with related m2m field 'SelfClashM2M.selfclashm2m_set'. Add a related_name argument to the definition for 'm2m_3'.
-invalid_models.selfclashm2m: Accessor for m2m field 'm2m_3' clashes with related m2m field 'SelfClashM2M.selfclashm2m_set'. Add a related_name argument to the definition for 'm2m_3'.
-invalid_models.selfclashm2m: Accessor for m2m field 'm2m_4' clashes with m2m field 'SelfClashM2M.selfclashm2m_set'. Add a related_name argument to the definition for 'm2m_4'.
-invalid_models.selfclashm2m: Accessor for m2m field 'm2m_4' clashes with related m2m field 'SelfClashM2M.selfclashm2m_set'. Add a related_name argument to the definition for 'm2m_4'.
-invalid_models.selfclashm2m: Accessor for m2m field 'm2m_4' clashes with related m2m field 'SelfClashM2M.selfclashm2m_set'. Add a related_name argument to the definition for 'm2m_4'.
-invalid_models.selfclashm2m: Reverse query name for m2m field 'm2m_3' clashes with field 'SelfClashM2M.selfclashm2m'. Add a related_name argument to the definition for 'm2m_3'.
-invalid_models.selfclashm2m: Reverse query name for m2m field 'm2m_4' clashes with field 'SelfClashM2M.selfclashm2m'. Add a related_name argument to the definition for 'm2m_4'.
-"""
--- a/thirdparty/google_appengine/lib/django/tests/modeltests/lookup/models.py	Mon Sep 07 20:26:39 2009 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,233 +0,0 @@
-"""
-7. The lookup API
-
-This demonstrates features of the database API.
-"""
-
-from django.db import models
-
-class Article(models.Model):
-    headline = models.CharField(maxlength=100)
-    pub_date = models.DateTimeField()
-    class Meta:
-        ordering = ('-pub_date', 'headline')
-
-    def __str__(self):
-        return self.headline
-
-__test__ = {'API_TESTS':r"""
-# Create a couple of Articles.
->>> from datetime import datetime
->>> a1 = Article(headline='Article 1', pub_date=datetime(2005, 7, 26))
->>> a1.save()
->>> a2 = Article(headline='Article 2', pub_date=datetime(2005, 7, 27))
->>> a2.save()
->>> a3 = Article(headline='Article 3', pub_date=datetime(2005, 7, 27))
->>> a3.save()
->>> a4 = Article(headline='Article 4', pub_date=datetime(2005, 7, 28))
->>> a4.save()
->>> a5 = Article(headline='Article 5', pub_date=datetime(2005, 8, 1, 9, 0))
->>> a5.save()
->>> a6 = Article(headline='Article 6', pub_date=datetime(2005, 8, 1, 8, 0))
->>> a6.save()
->>> a7 = Article(headline='Article 7', pub_date=datetime(2005, 7, 27))
->>> a7.save()
-
-# Each QuerySet gets iterator(), which is a generator that "lazily" returns
-# results using database-level iteration.
->>> for a in Article.objects.iterator():
-...     print a.headline
-Article 5
-Article 6
-Article 4
-Article 2
-Article 3
-Article 7
-Article 1
-
-# iterator() can be used on any QuerySet.
->>> for a in Article.objects.filter(headline__endswith='4').iterator():
-...     print a.headline
-Article 4
-
-# count() returns the number of objects matching search criteria.
->>> Article.objects.count()
-7L
->>> Article.objects.filter(pub_date__exact=datetime(2005, 7, 27)).count()
-3L
->>> Article.objects.filter(headline__startswith='Blah blah').count()
-0L
-
-# count() should respect sliced query sets.
->>> articles = Article.objects.all()
->>> articles.count()
-7L
->>> articles[:4].count()
-4
->>> articles[1:100].count()
-6L
->>> articles[10:100].count()
-0
-
-# Date and date/time lookups can also be done with strings.
->>> Article.objects.filter(pub_date__exact='2005-07-27 00:00:00').count()
-3L
-
-# in_bulk() takes a list of IDs and returns a dictionary mapping IDs
-# to objects.
->>> Article.objects.in_bulk([1, 2])
-{1: <Article: Article 1>, 2: <Article: Article 2>}
->>> Article.objects.in_bulk([3])
-{3: <Article: Article 3>}
->>> Article.objects.in_bulk([1000])
-{}
->>> Article.objects.in_bulk([])
-{}
->>> Article.objects.in_bulk('foo')
-Traceback (most recent call last):
-    ...
-AssertionError: in_bulk() must be provided with a list of IDs.
->>> Article.objects.in_bulk()
-Traceback (most recent call last):
-    ...
-TypeError: in_bulk() takes exactly 2 arguments (1 given)
->>> Article.objects.in_bulk(headline__startswith='Blah')
-Traceback (most recent call last):
-    ...
-TypeError: in_bulk() got an unexpected keyword argument 'headline__startswith'
-
-# values() returns a list of dictionaries instead of object instances -- and
-# you can specify which fields you want to retrieve.
->>> Article.objects.values('headline')
-[{'headline': 'Article 5'}, {'headline': 'Article 6'}, {'headline': 'Article 4'}, {'headline': 'Article 2'}, {'headline': 'Article 3'}, {'headline': 'Article 7'}, {'headline': 'Article 1'}]
->>> Article.objects.filter(pub_date__exact=datetime(2005, 7, 27)).values('id')
-[{'id': 2}, {'id': 3}, {'id': 7}]
->>> list(Article.objects.values('id', 'headline')) == [{'id': 5, 'headline': 'Article 5'}, {'id': 6, 'headline': 'Article 6'}, {'id': 4, 'headline': 'Article 4'}, {'id': 2, 'headline': 'Article 2'}, {'id': 3, 'headline': 'Article 3'}, {'id': 7, 'headline': 'Article 7'}, {'id': 1, 'headline': 'Article 1'}]
-True
-
->>> for d in Article.objects.values('id', 'headline'):
-...     i = d.items()
-...     i.sort()
-...     i
-[('headline', 'Article 5'), ('id', 5)]
-[('headline', 'Article 6'), ('id', 6)]
-[('headline', 'Article 4'), ('id', 4)]
-[('headline', 'Article 2'), ('id', 2)]
-[('headline', 'Article 3'), ('id', 3)]
-[('headline', 'Article 7'), ('id', 7)]
-[('headline', 'Article 1'), ('id', 1)]
-
-# You can use values() with iterator() for memory savings, because iterator()
-# uses database-level iteration.
->>> for d in Article.objects.values('id', 'headline').iterator():
-...     i = d.items()
-...     i.sort()
-...     i
-[('headline', 'Article 5'), ('id', 5)]
-[('headline', 'Article 6'), ('id', 6)]
-[('headline', 'Article 4'), ('id', 4)]
-[('headline', 'Article 2'), ('id', 2)]
-[('headline', 'Article 3'), ('id', 3)]
-[('headline', 'Article 7'), ('id', 7)]
-[('headline', 'Article 1'), ('id', 1)]
-
-# if you don't specify which fields, all are returned
->>> list(Article.objects.filter(id=5).values()) == [{'id': 5, 'headline': 'Article 5', 'pub_date': datetime(2005, 8, 1, 9, 0)}]
-True
-
-# Every DateField and DateTimeField creates get_next_by_FOO() and
-# get_previous_by_FOO() methods.
-# In the case of identical date values, these methods will use the ID as a
-# fallback check. This guarantees that no records are skipped or duplicated.
->>> a1.get_next_by_pub_date()
-<Article: Article 2>
->>> a2.get_next_by_pub_date()
-<Article: Article 3>
->>> a2.get_next_by_pub_date(headline__endswith='6')
-<Article: Article 6>
->>> a3.get_next_by_pub_date()
-<Article: Article 7>
->>> a4.get_next_by_pub_date()
-<Article: Article 6>
->>> a5.get_next_by_pub_date()
-Traceback (most recent call last):
-    ...
-DoesNotExist: Article matching query does not exist.
->>> a6.get_next_by_pub_date()
-<Article: Article 5>
->>> a7.get_next_by_pub_date()
-<Article: Article 4>
-
->>> a7.get_previous_by_pub_date()
-<Article: Article 3>
->>> a6.get_previous_by_pub_date()
-<Article: Article 4>
->>> a5.get_previous_by_pub_date()
-<Article: Article 6>
->>> a4.get_previous_by_pub_date()
-<Article: Article 7>
->>> a3.get_previous_by_pub_date()
-<Article: Article 2>
->>> a2.get_previous_by_pub_date()
-<Article: Article 1>
-
-# Underscores and percent signs have special meaning in the underlying
-# SQL code, but Django handles the quoting of them automatically.
->>> a8 = Article(headline='Article_ with underscore', pub_date=datetime(2005, 11, 20))
->>> a8.save()
->>> Article.objects.filter(headline__startswith='Article')
-[<Article: Article_ with underscore>, <Article: Article 5>, <Article: Article 6>, <Article: Article 4>, <Article: Article 2>, <Article: Article 3>, <Article: Article 7>, <Article: Article 1>]
->>> Article.objects.filter(headline__startswith='Article_')
-[<Article: Article_ with underscore>]
-
->>> a9 = Article(headline='Article% with percent sign', pub_date=datetime(2005, 11, 21))
->>> a9.save()
->>> Article.objects.filter(headline__startswith='Article')
-[<Article: Article% with percent sign>, <Article: Article_ with underscore>, <Article: Article 5>, <Article: Article 6>, <Article: Article 4>, <Article: Article 2>, <Article: Article 3>, <Article: Article 7>, <Article: Article 1>]
->>> Article.objects.filter(headline__startswith='Article%')
-[<Article: Article% with percent sign>]
-
-# exclude() is the opposite of filter() when doing lookups:
->>> Article.objects.filter(headline__contains='Article').exclude(headline__contains='with')
-[<Article: Article 5>, <Article: Article 6>, <Article: Article 4>, <Article: Article 2>, <Article: Article 3>, <Article: Article 7>, <Article: Article 1>]
->>> Article.objects.exclude(headline__startswith="Article_")
-[<Article: Article% with percent sign>, <Article: Article 5>, <Article: Article 6>, <Article: Article 4>, <Article: Article 2>, <Article: Article 3>, <Article: Article 7>, <Article: Article 1>]
->>> Article.objects.exclude(headline="Article 7")
-[<Article: Article% with percent sign>, <Article: Article_ with underscore>, <Article: Article 5>, <Article: Article 6>, <Article: Article 4>, <Article: Article 2>, <Article: Article 3>, <Article: Article 1>]
-
-# Backslashes also have special meaning in the underlying SQL code, but Django
-# automatically quotes them appropriately.
->>> a10 = Article(headline='Article with \\ backslash', pub_date=datetime(2005, 11, 22))
->>> a10.save()
->>> Article.objects.filter(headline__contains='\\')
-[<Article: Article with \ backslash>]
-
-# none() returns an EmptyQuerySet that behaves like any other QuerySet object
->>> Article.objects.none()
-[]
->>> Article.objects.none().filter(headline__startswith='Article')
-[]
->>> Article.objects.none().count()
-0
->>> [article for article in Article.objects.none().iterator()]
-[]
-
-# using __in with an empty list should return an empty query set
->>> Article.objects.filter(id__in=[])
-[]
-
->>> Article.objects.exclude(id__in=[])
-[<Article: Article with \ backslash>, <Article: Article% with percent sign>, <Article: Article_ with underscore>, <Article: Article 5>, <Article: Article 6>, <Article: Article 4>, <Article: Article 2>, <Article: Article 3>, <Article: Article 7>, <Article: Article 1>]
-
-# Programming errors are pointed out with nice error messages
->>> Article.objects.filter(pub_date_year='2005').count()
-Traceback (most recent call last):
-    ...
-TypeError: Cannot resolve keyword 'pub_date_year' into field
-
->>> Article.objects.filter(headline__starts='Article')
-Traceback (most recent call last):
-    ...
-TypeError: Cannot resolve keyword 'headline__starts' into field
-
-"""}
--- a/thirdparty/google_appengine/lib/django/tests/modeltests/m2m_and_m2o/models.py	Mon Sep 07 20:26:39 2009 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,65 +0,0 @@
-"""
-29. Many-to-many and many-to-one relationships to the same table
-
-Make sure to set ``related_name`` if you use relationships to the same table.
-"""
-
-from django.db import models
-
-class User(models.Model):
-    username = models.CharField(maxlength=20)
-
-class Issue(models.Model):
-    num = models.IntegerField()
-    cc = models.ManyToManyField(User, blank=True, related_name='test_issue_cc')
-    client = models.ForeignKey(User, related_name='test_issue_client')
-
-    def __str__(self):
-        return str(self.num)
-
-    class Meta:
-        ordering = ('num',)
-
-
-__test__ = {'API_TESTS':"""
->>> Issue.objects.all()
-[]
->>> r = User(username='russell')
->>> r.save()
->>> g = User(username='gustav')
->>> g.save()
-
->>> i = Issue(num=1)
->>> i.client = r
->>> i.save()
-
->>> i2 = Issue(num=2)
->>> i2.client = r
->>> i2.save()
->>> i2.cc.add(r)
-
->>> i3 = Issue(num=3)
->>> i3.client = g
->>> i3.save()
->>> i3.cc.add(r)
-
->>> from django.db.models.query import Q
-
->>> Issue.objects.filter(client=r.id)
-[<Issue: 1>, <Issue: 2>]
->>> Issue.objects.filter(client=g.id)
-[<Issue: 3>]
->>> Issue.objects.filter(cc__id__exact=g.id)
-[]
->>> Issue.objects.filter(cc__id__exact=r.id)
-[<Issue: 2>, <Issue: 3>]
-
-# These queries combine results from the m2m and the m2o relationships.
-# They're three ways of saying the same thing.
->>> Issue.objects.filter(Q(cc__id__exact=r.id) | Q(client=r.id))
-[<Issue: 1>, <Issue: 2>, <Issue: 3>]
->>> Issue.objects.filter(cc__id__exact=r.id) | Issue.objects.filter(client=r.id)
-[<Issue: 1>, <Issue: 2>, <Issue: 3>]
->>> Issue.objects.filter(Q(client=r.id) | Q(cc__id__exact=r.id))
-[<Issue: 1>, <Issue: 2>, <Issue: 3>]
-"""}
--- a/thirdparty/google_appengine/lib/django/tests/modeltests/m2m_intermediary/models.py	Mon Sep 07 20:26:39 2009 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,68 +0,0 @@
-"""
-9. Many-to-many relationships via an intermediary table
-
-For many-to-many relationships that need extra fields on the intermediary
-table, use an intermediary model.
-
-In this example, an ``Article`` can have multiple ``Reporter``s, and each
-``Article``-``Reporter`` combination (a ``Writer``) has a ``position`` field,
-which specifies the ``Reporter``'s position for the given article (e.g. "Staff
-writer").
-"""
-
-from django.db import models
-
-class Reporter(models.Model):
-    first_name = models.CharField(maxlength=30)
-    last_name = models.CharField(maxlength=30)
-
-    def __str__(self):
-        return "%s %s" % (self.first_name, self.last_name)
-
-class Article(models.Model):
-    headline = models.CharField(maxlength=100)
-    pub_date = models.DateField()
-
-    def __str__(self):
-        return self.headline
-
-class Writer(models.Model):
-    reporter = models.ForeignKey(Reporter)
-    article = models.ForeignKey(Article)
-    position = models.CharField(maxlength=100)
-
-    def __str__(self):
-        return '%s (%s)' % (self.reporter, self.position)
-
-__test__ = {'API_TESTS':"""
-# Create a few Reporters.
->>> r1 = Reporter(first_name='John', last_name='Smith')
->>> r1.save()
->>> r2 = Reporter(first_name='Jane', last_name='Doe')
->>> r2.save()
-
-# Create an Article.
->>> from datetime import datetime
->>> a = Article(headline='This is a test', pub_date=datetime(2005, 7, 27))
->>> a.save()
-
-# Create a few Writers.
->>> w1 = Writer(reporter=r1, article=a, position='Main writer')
->>> w1.save()
->>> w2 = Writer(reporter=r2, article=a, position='Contributor')
->>> w2.save()
-
-# Play around with the API.
->>> a.writer_set.select_related().order_by('-position')
-[<Writer: John Smith (Main writer)>, <Writer: Jane Doe (Contributor)>]
->>> w1.reporter
-<Reporter: John Smith>
->>> w2.reporter
-<Reporter: Jane Doe>
->>> w1.article
-<Article: This is a test>
->>> w2.article
-<Article: This is a test>
->>> r1.writer_set.all()
-[<Writer: John Smith (Main writer)>]
-"""}
--- a/thirdparty/google_appengine/lib/django/tests/modeltests/m2m_multiple/models.py	Mon Sep 07 20:26:39 2009 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,79 +0,0 @@
-"""
-20. Multiple many-to-many relationships between the same two tables
-
-In this example, an Article can have many Categories (as "primary") and many
-Categories (as "secondary").
-
-Set ``related_name`` to designate what the reverse relationship is called.
-"""
-
-from django.db import models
-
-class Category(models.Model):
-    name = models.CharField(maxlength=20)
-    class Meta:
-       ordering = ('name',)
-
-    def __str__(self):
-        return self.name
-
-class Article(models.Model):
-    headline = models.CharField(maxlength=50)
-    pub_date = models.DateTimeField()
-    primary_categories = models.ManyToManyField(Category, related_name='primary_article_set')
-    secondary_categories = models.ManyToManyField(Category, related_name='secondary_article_set')
-    class Meta:
-       ordering = ('pub_date',)
-
-    def __str__(self):
-        return self.headline
-
-__test__ = {'API_TESTS':"""
->>> from datetime import datetime
-
->>> c1 = Category(name='Sports')
->>> c1.save()
->>> c2 = Category(name='News')
->>> c2.save()
->>> c3 = Category(name='Crime')
->>> c3.save()
->>> c4 = Category(name='Life')
->>> c4.save()
-
->>> a1 = Article(headline='Area man steals', pub_date=datetime(2005, 11, 27))
->>> a1.save()
->>> a1.primary_categories.add(c2, c3)
->>> a1.secondary_categories.add(c4)
-
->>> a2 = Article(headline='Area man runs', pub_date=datetime(2005, 11, 28))
->>> a2.save()
->>> a2.primary_categories.add(c1, c2)
->>> a2.secondary_categories.add(c4)
-
->>> a1.primary_categories.all()
-[<Category: Crime>, <Category: News>]
-
->>> a2.primary_categories.all()
-[<Category: News>, <Category: Sports>]
-
->>> a1.secondary_categories.all()
-[<Category: Life>]
-
-
->>> c1.primary_article_set.all()
-[<Article: Area man runs>]
->>> c1.secondary_article_set.all()
-[]
->>> c2.primary_article_set.all()
-[<Article: Area man steals>, <Article: Area man runs>]
->>> c2.secondary_article_set.all()
-[]
->>> c3.primary_article_set.all()
-[<Article: Area man steals>]
->>> c3.secondary_article_set.all()
-[]
->>> c4.primary_article_set.all()
-[]
->>> c4.secondary_article_set.all()
-[<Article: Area man steals>, <Article: Area man runs>]
-"""}
--- a/thirdparty/google_appengine/lib/django/tests/modeltests/m2m_recursive/models.py	Mon Sep 07 20:26:39 2009 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,192 +0,0 @@
-"""
-28. Many-to-many relationships between the same two tables
-
-In this example, A Person can have many friends, who are also people. Friendship is a
-symmetrical relationship - if I am your friend, you are my friend.
-
-A person can also have many idols - but while I may idolize you, you may not think
-the same of me. 'Idols' is an example of a non-symmetrical m2m field. Only recursive
-m2m fields may be non-symmetrical, and they are symmetrical by default.
-
-This test validates that the m2m table will create a mangled name for the m2m table if
-there will be a clash, and tests that symmetry is preserved where appropriate.
-"""
-
-from django.db import models
-
-class Person(models.Model):
-    name = models.CharField(maxlength=20)
-    friends = models.ManyToManyField('self')
-    idols = models.ManyToManyField('self', symmetrical=False, related_name='stalkers')
-
-    def __str__(self):
-        return self.name
-
-__test__ = {'API_TESTS':"""
->>> a = Person(name='Anne')
->>> a.save()
->>> b = Person(name='Bill')
->>> b.save()
->>> c = Person(name='Chuck')
->>> c.save()
->>> d = Person(name='David')
->>> d.save()
-
-# Add some friends in the direction of field definition
-# Anne is friends with Bill and Chuck
->>> a.friends.add(b,c)
-
-# David is friends with Anne and Chuck - add in reverse direction
->>> d.friends.add(a,c)
-
-# Who is friends with Anne?
->>> a.friends.all()
-[<Person: Bill>, <Person: Chuck>, <Person: David>]
-
-# Who is friends with Bill?
->>> b.friends.all()
-[<Person: Anne>]
-
-# Who is friends with Chuck?
->>> c.friends.all()
-[<Person: Anne>, <Person: David>]
-
-# Who is friends with David?
->>> d.friends.all()
-[<Person: Anne>, <Person: Chuck>]
-
-# Bill is already friends with Anne - add Anne again, but in the reverse direction
->>> b.friends.add(a)
-
-# Who is friends with Anne?
->>> a.friends.all()
-[<Person: Bill>, <Person: Chuck>, <Person: David>]
-
-# Who is friends with Bill?
->>> b.friends.all()
-[<Person: Anne>]
-
-# Remove Anne from Bill's friends
->>> b.friends.remove(a)
-
-# Who is friends with Anne?
->>> a.friends.all()
-[<Person: Chuck>, <Person: David>]
-
-# Who is friends with Bill?
->>> b.friends.all()
-[]
-
-# Clear Anne's group of friends
->>> a.friends.clear()
-
-# Who is friends with Anne?
->>> a.friends.all()
-[]
-
-# Reverse relationships should also be gone
-# Who is friends with Chuck?
->>> c.friends.all()
-[<Person: David>]
-
-# Who is friends with David?
->>> d.friends.all()
-[<Person: Chuck>]
-
-
-# Add some idols in the direction of field definition
-# Anne idolizes Bill and Chuck
->>> a.idols.add(b,c)
-
-# Bill idolizes Anne right back
->>> b.idols.add(a)
-
-# David is idolized by Anne and Chuck - add in reverse direction
->>> d.stalkers.add(a,c)
-
-# Who are Anne's idols?
->>> a.idols.all()
-[<Person: Bill>, <Person: Chuck>, <Person: David>]
-
-# Who is stalking Anne?
->>> a.stalkers.all()
-[<Person: Bill>]
-
-# Who are Bill's idols?
->>> b.idols.all()
-[<Person: Anne>]
-
-# Who is stalking Bill?
->>> b.stalkers.all()
-[<Person: Anne>]
-
-# Who are Chuck's idols?
->>> c.idols.all()
-[<Person: David>]
-
-# Who is stalking Chuck?
->>> c.stalkers.all()
-[<Person: Anne>]
-
-# Who are David's idols?
->>> d.idols.all()
-[]
-
-# Who is stalking David
->>> d.stalkers.all()
-[<Person: Anne>, <Person: Chuck>]
-
-# Bill is already being stalked by Anne - add Anne again, but in the reverse direction
->>> b.stalkers.add(a)
-
-# Who are Anne's idols?
->>> a.idols.all()
-[<Person: Bill>, <Person: Chuck>, <Person: David>]
-
-# Who is stalking Anne?
-[<Person: Bill>]
-
-# Who are Bill's idols
->>> b.idols.all()
-[<Person: Anne>]
-
-# Who is stalking Bill?
->>> b.stalkers.all()
-[<Person: Anne>]
-
-# Remove Anne from Bill's list of stalkers
->>> b.stalkers.remove(a)
-
-# Who are Anne's idols?
->>> a.idols.all()
-[<Person: Chuck>, <Person: David>]
-
-# Who is stalking Anne?
->>> a.stalkers.all()
-[<Person: Bill>]
-
-# Who are Bill's idols?
->>> b.idols.all()
-[<Person: Anne>]
-
-# Who is stalking Bill?
->>> b.stalkers.all()
-[]
-
-# Clear Anne's group of idols
->>> a.idols.clear()
-
-# Who are Anne's idols
->>> a.idols.all()
-[]
-
-# Reverse relationships should also be gone
-# Who is stalking Chuck?
->>> c.stalkers.all()
-[]
-
-# Who is friends with David?
->>> d.stalkers.all()
-[<Person: Chuck>]
-
-"""}
--- a/thirdparty/google_appengine/lib/django/tests/modeltests/m2o_recursive/models.py	Mon Sep 07 20:26:39 2009 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,40 +0,0 @@
-"""
-11. Relating an object to itself, many-to-one
-
-To define a many-to-one relationship between a model and itself, use
-``ForeignKey('self')``.
-
-In this example, a ``Category`` is related to itself. That is, each
-``Category`` has a parent ``Category``.
-
-Set ``related_name`` to designate what the reverse relationship is called.
-"""
-
-from django.db import models
-
-class Category(models.Model):
-    name = models.CharField(maxlength=20)
-    parent = models.ForeignKey('self', null=True, related_name='child_set')
-
-    def __str__(self):
-        return self.name
-
-__test__ = {'API_TESTS':"""
-# Create a few Category objects.
->>> r = Category(id=None, name='Root category', parent=None)
->>> r.save()
->>> c = Category(id=None, name='Child category', parent=r)
->>> c.save()
-
->>> r.child_set.all()
-[<Category: Child category>]
->>> r.child_set.get(name__startswith='Child')
-<Category: Child category>
->>> print r.parent
-None
-
->>> c.child_set.all()
-[]
->>> c.parent
-<Category: Root category>
-"""}
--- a/thirdparty/google_appengine/lib/django/tests/modeltests/m2o_recursive2/models.py	Mon Sep 07 20:26:39 2009 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,43 +0,0 @@
-"""
-12. Relating a model to another model more than once
-
-In this example, a ``Person`` can have a ``mother`` and ``father`` -- both of
-which are other ``Person`` objects.
-
-Set ``related_name`` to designate what the reverse relationship is called.
-"""
-
-from django.db import models
-
-class Person(models.Model):
-    full_name = models.CharField(maxlength=20)
-    mother = models.ForeignKey('self', null=True, related_name='mothers_child_set')
-    father = models.ForeignKey('self', null=True, related_name='fathers_child_set')
-
-    def __str__(self):
-        return self.full_name
-
-__test__ = {'API_TESTS':"""
-# Create two Person objects -- the mom and dad in our family.
->>> dad = Person(full_name='John Smith Senior', mother=None, father=None)
->>> dad.save()
->>> mom = Person(full_name='Jane Smith', mother=None, father=None)
->>> mom.save()
-
-# Give mom and dad a kid.
->>> kid = Person(full_name='John Smith Junior', mother=mom, father=dad)
->>> kid.save()
-
->>> kid.mother
-<Person: Jane Smith>
->>> kid.father
-<Person: John Smith Senior>
->>> dad.fathers_child_set.all()
-[<Person: John Smith Junior>]
->>> mom.mothers_child_set.all()
-[<Person: John Smith Junior>]
->>> kid.mothers_child_set.all()
-[]
->>> kid.fathers_child_set.all()
-[]
-"""}
--- a/thirdparty/google_appengine/lib/django/tests/modeltests/manipulators/models.py	Mon Sep 07 20:26:39 2009 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,91 +0,0 @@
-"""
-27. Default manipulators
-
-Each model gets an AddManipulator and ChangeManipulator by default.
-"""
-
-from django.db import models
-
-class Musician(models.Model):
-    first_name = models.CharField(maxlength=30)
-    last_name = models.CharField(maxlength=30)
-
-    def __str__(self):
-        return "%s %s" % (self.first_name, self.last_name)
-
-class Album(models.Model):
-    name = models.CharField(maxlength=100)
-    musician = models.ForeignKey(Musician)
-    release_date = models.DateField(blank=True, null=True)
-
-    def __str__(self):
-        return self.name
-
-__test__ = {'API_TESTS':"""
->>> from django.utils.datastructures import MultiValueDict
-
-# Create a Musician object via the default AddManipulator.
->>> man = Musician.AddManipulator()
->>> data = MultiValueDict({'first_name': ['Ella'], 'last_name': ['Fitzgerald']})
-
->>> man.get_validation_errors(data)
-{}
->>> man.do_html2python(data)
->>> m1 = man.save(data)
-
-# Verify it worked.
->>> Musician.objects.all()
-[<Musician: Ella Fitzgerald>]
->>> [m1] == list(Musician.objects.all())
-True
-
-# Attempt to add a Musician without a first_name.
->>> man.get_validation_errors(MultiValueDict({'last_name': ['Blakey']}))
-{'first_name': ['This field is required.']}
-
-# Attempt to add a Musician without a first_name and last_name.
->>> man.get_validation_errors(MultiValueDict({}))
-{'first_name': ['This field is required.'], 'last_name': ['This field is required.']}
-
-# Attempt to create an Album without a name or musician.
->>> man = Album.AddManipulator()
->>> man.get_validation_errors(MultiValueDict({}))
-{'musician': ['This field is required.'], 'name': ['This field is required.']}
-
-# Attempt to create an Album with an invalid musician.
->>> man.get_validation_errors(MultiValueDict({'name': ['Sallies Fforth'], 'musician': ['foo']}))
-{'musician': ["Select a valid choice; 'foo' is not in ['', '1']."]}
-
-# Attempt to create an Album with an invalid release_date.
->>> man.get_validation_errors(MultiValueDict({'name': ['Sallies Fforth'], 'musician': ['1'], 'release_date': 'today'}))
-{'release_date': ['Enter a valid date in YYYY-MM-DD format.']}
-
-# Create an Album without a release_date (because it's optional).
->>> data = MultiValueDict({'name': ['Ella and Basie'], 'musician': ['1']})
->>> man.get_validation_errors(data)
-{}
->>> man.do_html2python(data)
->>> a1 = man.save(data)
-
-# Verify it worked.
->>> Album.objects.all()
-[<Album: Ella and Basie>]
->>> Album.objects.get().musician
-<Musician: Ella Fitzgerald>
-
-# Create an Album with a release_date.
->>> data = MultiValueDict({'name': ['Ultimate Ella'], 'musician': ['1'], 'release_date': ['2005-02-13']})
->>> man.get_validation_errors(data)
-{}
->>> man.do_html2python(data)
->>> a2 = man.save(data)
-
-# Verify it worked.
->>> Album.objects.order_by('name')
-[<Album: Ella and Basie>, <Album: Ultimate Ella>]
->>> a2 = Album.objects.get(pk=2)
->>> a2
-<Album: Ultimate Ella>
->>> a2.release_date
-datetime.date(2005, 2, 13)
-"""}
--- a/thirdparty/google_appengine/lib/django/tests/modeltests/many_to_many/models.py	Mon Sep 07 20:26:39 2009 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,258 +0,0 @@
-"""
-5. Many-to-many relationships
-
-To define a many-to-many relationship, use ManyToManyField().
-
-In this example, an article can be published in multiple publications,
-and a publication has multiple articles.
-"""
-
-from django.db import models
-
-class Publication(models.Model):
-    title = models.CharField(maxlength=30)
-
-    def __str__(self):
-        return self.title
-
-    class Meta:
-        ordering = ('title',)
-
-class Article(models.Model):
-    headline = models.CharField(maxlength=100)
-    publications = models.ManyToManyField(Publication)
-
-    def __str__(self):
-        return self.headline
-
-    class Meta:
-        ordering = ('headline',)
-
-__test__ = {'API_TESTS':"""
-# Create a couple of Publications.
->>> p1 = Publication(id=None, title='The Python Journal')
->>> p1.save()
->>> p2 = Publication(id=None, title='Science News')
->>> p2.save()
->>> p3 = Publication(id=None, title='Science Weekly')
->>> p3.save()
-
-# Create an Article.
->>> a1 = Article(id=None, headline='Django lets you build Web apps easily')
->>> a1.save()
-
-# Associate the Article with a Publication.
->>> a1.publications.add(p1)
-
-# Create another Article, and set it to appear in both Publications.
->>> a2 = Article(id=None, headline='NASA uses Python')
->>> a2.save()
->>> a2.publications.add(p1, p2)
->>> a2.publications.add(p3)
-
-# Adding a second time is OK
->>> a2.publications.add(p3)
-
-# Add a Publication directly via publications.add by using keyword arguments.
->>> new_publication = a2.publications.create(title='Highlights for Children')
-
-# Article objects have access to their related Publication objects.
->>> a1.publications.all()
-[<Publication: The Python Journal>]
->>> a2.publications.all()
-[<Publication: Highlights for Children>, <Publication: Science News>, <Publication: Science Weekly>, <Publication: The Python Journal>]
-
-# Publication objects have access to their related Article objects.
->>> p2.article_set.all()
-[<Article: NASA uses Python>]
->>> p1.article_set.all()
-[<Article: Django lets you build Web apps easily>, <Article: NASA uses Python>]
->>> Publication.objects.get(id=4).article_set.all()
-[<Article: NASA uses Python>]
-
-# We can perform kwarg queries across m2m relationships
->>> Article.objects.filter(publications__id__exact=1)
-[<Article: Django lets you build Web apps easily>, <Article: NASA uses Python>]
->>> Article.objects.filter(publications__pk=1)
-[<Article: Django lets you build Web apps easily>, <Article: NASA uses Python>]
->>> Article.objects.filter(publications=1)
-[<Article: Django lets you build Web apps easily>, <Article: NASA uses Python>]
->>> Article.objects.filter(publications=p1)
-[<Article: Django lets you build Web apps easily>, <Article: NASA uses Python>]
-
->>> Article.objects.filter(publications__title__startswith="Science")
-[<Article: NASA uses Python>, <Article: NASA uses Python>]
-
->>> Article.objects.filter(publications__title__startswith="Science").distinct()
-[<Article: NASA uses Python>]
-
-# The count() function respects distinct() as well.
->>> Article.objects.filter(publications__title__startswith="Science").count()
-2
-
->>> Article.objects.filter(publications__title__startswith="Science").distinct().count()
-1
-
->>> Article.objects.filter(publications__in=[1,2]).distinct()
-[<Article: Django lets you build Web apps easily>, <Article: NASA uses Python>]
->>> Article.objects.filter(publications__in=[1,p2]).distinct()
-[<Article: Django lets you build Web apps easily>, <Article: NASA uses Python>]
->>> Article.objects.filter(publications__in=[p1,p2]).distinct()
-[<Article: Django lets you build Web apps easily>, <Article: NASA uses Python>]
-
-# Reverse m2m queries are supported (i.e., starting at the table that doesn't
-# have a ManyToManyField).
->>> Publication.objects.filter(id__exact=1)
-[<Publication: The Python Journal>]
->>> Publication.objects.filter(pk=1)
-[<Publication: The Python Journal>]
-
->>> Publication.objects.filter(article__headline__startswith="NASA")
-[<Publication: Highlights for Children>, <Publication: Science News>, <Publication: Science Weekly>, <Publication: The Python Journal>]
-
->>> Publication.objects.filter(article__id__exact=1)
-[<Publication: The Python Journal>]
->>> Publication.objects.filter(article__pk=1)
-[<Publication: The Python Journal>]
->>> Publication.objects.filter(article=1)
-[<Publication: The Python Journal>]
->>> Publication.objects.filter(article=a1)
-[<Publication: The Python Journal>]
-
->>> Publication.objects.filter(article__in=[1,2]).distinct()
-[<Publication: Highlights for Children>, <Publication: Science News>, <Publication: Science Weekly>, <Publication: The Python Journal>]
->>> Publication.objects.filter(article__in=[1,a2]).distinct()
-[<Publication: Highlights for Children>, <Publication: Science News>, <Publication: Science Weekly>, <Publication: The Python Journal>]
->>> Publication.objects.filter(article__in=[a1,a2]).distinct()
-[<Publication: Highlights for Children>, <Publication: Science News>, <Publication: Science Weekly>, <Publication: The Python Journal>]
-
-# If we delete a Publication, its Articles won't be able to access it.
->>> p1.delete()
->>> Publication.objects.all()
-[<Publication: Highlights for Children>, <Publication: Science News>, <Publication: Science Weekly>]
->>> a1 = Article.objects.get(pk=1)
->>> a1.publications.all()
-[]
-
-# If we delete an Article, its Publications won't be able to access it.
->>> a2.delete()
->>> Article.objects.all()
-[<Article: Django lets you build Web apps easily>]
->>> p2.article_set.all()
-[]
-
-# Adding via the 'other' end of an m2m
->>> a4 = Article(headline='NASA finds intelligent life on Earth')
->>> a4.save()
->>> p2.article_set.add(a4)
->>> p2.article_set.all()
-[<Article: NASA finds intelligent life on Earth>]
->>> a4.publications.all()
-[<Publication: Science News>]
-
-# Adding via the other end using keywords
->>> new_article = p2.article_set.create(headline='Oxygen-free diet works wonders')
->>> p2.article_set.all()
-[<Article: NASA finds intelligent life on Earth>, <Article: Oxygen-free diet works wonders>]
->>> a5 = p2.article_set.all()[1]
->>> a5.publications.all()
-[<Publication: Science News>]
-
-# Removing publication from an article:
->>> a4.publications.remove(p2)
->>> p2.article_set.all()
-[<Article: Oxygen-free diet works wonders>]
->>> a4.publications.all()
-[]
-
-# And from the other end
->>> p2.article_set.remove(a5)
->>> p2.article_set.all()
-[]
->>> a5.publications.all()
-[]
-
-# Relation sets can be assigned. Assignment clears any existing set members
->>> p2.article_set = [a4, a5]
->>> p2.article_set.all()
-[<Article: NASA finds intelligent life on Earth>, <Article: Oxygen-free diet works wonders>]
->>> a4.publications.all()
-[<Publication: Science News>]
->>> a4.publications = [p3]
->>> p2.article_set.all()
-[<Article: Oxygen-free diet works wonders>]
->>> a4.publications.all()
-[<Publication: Science Weekly>]
-
-# Relation sets can be cleared:
->>> p2.article_set.clear()
->>> p2.article_set.all()
-[]
->>> a4.publications.all()
-[<Publication: Science Weekly>]
-
-# And you can clear from the other end
->>> p2.article_set.add(a4, a5)
->>> p2.article_set.all()
-[<Article: NASA finds intelligent life on Earth>, <Article: Oxygen-free diet works wonders>]
->>> a4.publications.all()
-[<Publication: Science News>, <Publication: Science Weekly>]
->>> a4.publications.clear()
->>> a4.publications.all()
-[]
->>> p2.article_set.all()
-[<Article: Oxygen-free diet works wonders>]
-
-# Relation sets can also be set using primary key values
->>> p2.article_set = [a4.id, a5.id]
->>> p2.article_set.all()
-[<Article: NASA finds intelligent life on Earth>, <Article: Oxygen-free diet works wonders>]
->>> a4.publications.all()
-[<Publication: Science News>]
->>> a4.publications = [p3.id]
->>> p2.article_set.all()
-[<Article: Oxygen-free diet works wonders>]
->>> a4.publications.all()
-[<Publication: Science Weekly>]
-
-# Recreate the article and Publication we have deleted.
->>> p1 = Publication(id=None, title='The Python Journal')
->>> p1.save()
->>> a2 = Article(id=None, headline='NASA uses Python')
->>> a2.save()
->>> a2.publications.add(p1, p2, p3)
-
-# Bulk delete some Publications - references to deleted publications should go
->>> Publication.objects.filter(title__startswith='Science').delete()
->>> Publication.objects.all()
-[<Publication: Highlights for Children>, <Publication: The Python Journal>]
->>> Article.objects.all()
-[<Article: Django lets you build Web apps easily>, <Article: NASA finds intelligent life on Earth>, <Article: NASA uses Python>, <Article: Oxygen-free diet works wonders>]
->>> a2.publications.all()
-[<Publication: The Python Journal>]
-
-# Bulk delete some articles - references to deleted objects should go
->>> q = Article.objects.filter(headline__startswith='Django')
->>> print q
-[<Article: Django lets you build Web apps easily>]
->>> q.delete()
-
-# After the delete, the QuerySet cache needs to be cleared, and the referenced objects should be gone
->>> print q
-[]
->>> p1.article_set.all()
-[<Article: NASA uses Python>]
-
-# An alternate to calling clear() is to assign the empty set
->>> p1.article_set = []
->>> p1.article_set.all()
-[]
-
->>> a2.publications = [p1, new_publication]
->>> a2.publications.all()
-[<Publication: Highlights for Children>, <Publication: The Python Journal>]
->>> a2.publications = []
->>> a2.publications.all()
-[]
-
-"""}
--- a/thirdparty/google_appengine/lib/django/tests/modeltests/many_to_one/models.py	Mon Sep 07 20:26:39 2009 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,266 +0,0 @@
-"""
-4. Many-to-one relationships
-
-To define a many-to-one relationship, use ``ForeignKey()`` .
-"""
-
-from django.db import models
-
-class Reporter(models.Model):
-    first_name = models.CharField(maxlength=30)
-    last_name = models.CharField(maxlength=30)
-    email = models.EmailField()
-
-    def __str__(self):
-        return "%s %s" % (self.first_name, self.last_name)
-
-class Article(models.Model):
-    headline = models.CharField(maxlength=100)
-    pub_date = models.DateField()
-    reporter = models.ForeignKey(Reporter)
-
-    def __str__(self):
-        return self.headline
-
-    class Meta:
-        ordering = ('headline',)
-
-__test__ = {'API_TESTS':"""
-# Create a few Reporters.
->>> r = Reporter(first_name='John', last_name='Smith', email='john@example.com')
->>> r.save()
-
->>> r2 = Reporter(first_name='Paul', last_name='Jones', email='paul@example.com')
->>> r2.save()
-
-# Create an Article.
->>> from datetime import datetime
->>> a = Article(id=None, headline="This is a test", pub_date=datetime(2005, 7, 27), reporter=r)
->>> a.save()
-
->>> a.reporter.id
-1
-
->>> a.reporter
-<Reporter: John Smith>
-
-# Article objects have access to their related Reporter objects.
->>> r = a.reporter
->>> r.first_name, r.last_name
-('John', 'Smith')
-
-# Create an Article via the Reporter object.
->>> new_article = r.article_set.create(headline="John's second story", pub_date=datetime(2005, 7, 29))
->>> new_article
-<Article: John's second story>
->>> new_article.reporter.id
-1
-
-# Create a new article, and add it to the article set.
->>> new_article2 = Article(headline="Paul's story", pub_date=datetime(2006, 1, 17))
->>> r.article_set.add(new_article2)
->>> new_article2.reporter.id
-1
->>> r.article_set.all()
-[<Article: John's second story>, <Article: Paul's story>, <Article: This is a test>]
-
-# Add the same article to a different article set - check that it moves.
->>> r2.article_set.add(new_article2)
->>> new_article2.reporter.id
-2
->>> r.article_set.all()
-[<Article: John's second story>, <Article: This is a test>]
->>> r2.article_set.all()
-[<Article: Paul's story>]
-
-# Assign the article to the reporter directly using the descriptor
->>> new_article2.reporter = r
->>> new_article2.save()
->>> new_article2.reporter
-<Reporter: John Smith>
->>> new_article2.reporter.id
-1
->>> r.article_set.all()
-[<Article: John's second story>, <Article: Paul's story>, <Article: This is a test>]
->>> r2.article_set.all()
-[]
-
-# Set the article back again using set descriptor.
->>> r2.article_set = [new_article, new_article2]
->>> r.article_set.all()
-[<Article: This is a test>]
->>> r2.article_set.all()
-[<Article: John's second story>, <Article: Paul's story>]
-
-# Funny case - assignment notation can only go so far; because the
-# ForeignKey cannot be null, existing members of the set must remain
->>> r.article_set = [new_article]
->>> r.article_set.all()
-[<Article: John's second story>, <Article: This is a test>]
->>> r2.article_set.all()
-[<Article: Paul's story>]
-
-# Reporter cannot be null - there should not be a clear or remove method
->>> hasattr(r2.article_set, 'remove')
-False
->>> hasattr(r2.article_set, 'clear')
-False
-
-# Reporter objects have access to their related Article objects.
->>> r.article_set.all()
-[<Article: John's second story>, <Article: This is a test>]
-
->>> r.article_set.filter(headline__startswith='This')
-[<Article: This is a test>]
-
->>> r.article_set.count()
-2
-
->>> r2.article_set.count()
-1
-
-# Get articles by id
->>> Article.objects.filter(id__exact=1)
-[<Article: This is a test>]
->>> Article.objects.filter(pk=1)
-[<Article: This is a test>]
-
-# Query on an article property
->>> Article.objects.filter(headline__startswith='This')
-[<Article: This is a test>]
-
-# The API automatically follows relationships as far as you need.
-# Use double underscores to separate relationships.
-# This works as many levels deep as you want. There's no limit.
-# Find all Articles for any Reporter whose first name is "John".
->>> Article.objects.filter(reporter__first_name__exact='John')
-[<Article: John's second story>, <Article: This is a test>]
-
-# Check that implied __exact also works
->>> Article.objects.filter(reporter__first_name='John')
-[<Article: John's second story>, <Article: This is a test>]
-
-# Query twice over the related field.
->>> Article.objects.filter(reporter__first_name__exact='John', reporter__last_name__exact='Smith')
-[<Article: John's second story>, <Article: This is a test>]
-
-# The underlying query only makes one join when a related table is referenced twice.
->>> query = Article.objects.filter(reporter__first_name__exact='John', reporter__last_name__exact='Smith')
->>> null, sql, null = query._get_sql_clause()
->>> sql.count('INNER JOIN')
-1
-
-# The automatically joined table has a predictable name.
->>> Article.objects.filter(reporter__first_name__exact='John').extra(where=["many_to_one_article__reporter.last_name='Smith'"])
-[<Article: John's second story>, <Article: This is a test>]
-
-# Find all Articles for the Reporter whose ID is 1.
-# Use direct ID check, pk check, and object comparison 
->>> Article.objects.filter(reporter__id__exact=1)
-[<Article: John's second story>, <Article: This is a test>]
->>> Article.objects.filter(reporter__pk=1)
-[<Article: John's second story>, <Article: This is a test>]
->>> Article.objects.filter(reporter=1)
-[<Article: John's second story>, <Article: This is a test>]
->>> Article.objects.filter(reporter=r)
-[<Article: John's second story>, <Article: This is a test>]
-
->>> Article.objects.filter(reporter__in=[1,2]).distinct()
-[<Article: John's second story>, <Article: Paul's story>, <Article: This is a test>]
->>> Article.objects.filter(reporter__in=[r,r2]).distinct()
-[<Article: John's second story>, <Article: Paul's story>, <Article: This is a test>]
-
-# You need two underscores between "reporter" and "id" -- not one.
->>> Article.objects.filter(reporter_id__exact=1)
-Traceback (most recent call last):
-    ...
-TypeError: Cannot resolve keyword 'reporter_id' into field
-
-# You need to specify a comparison clause
->>> Article.objects.filter(reporter_id=1)
-Traceback (most recent call last):
-    ...
-TypeError: Cannot resolve keyword 'reporter_id' into field
-
-# You can also instantiate an Article by passing
-# the Reporter's ID instead of a Reporter object.
->>> a3 = Article(id=None, headline="This is a test", pub_date=datetime(2005, 7, 27), reporter_id=r.id)
->>> a3.save()
->>> a3.reporter.id
-1
->>> a3.reporter
-<Reporter: John Smith>
-
-# Similarly, the reporter ID can be a string.
->>> a4 = Article(id=None, headline="This is a test", pub_date=datetime(2005, 7, 27), reporter_id="1")
->>> a4.save()
->>> a4.reporter
-<Reporter: John Smith>
-
-# Reporters can be queried
->>> Reporter.objects.filter(id__exact=1)
-[<Reporter: John Smith>]
->>> Reporter.objects.filter(pk=1)
-[<Reporter: John Smith>]
->>> Reporter.objects.filter(first_name__startswith='John')
-[<Reporter: John Smith>]
-
-# Reporters can query in opposite direction of ForeignKey definition
->>> Reporter.objects.filter(article__id__exact=1)
-[<Reporter: John Smith>]
->>> Reporter.objects.filter(article__pk=1)
-[<Reporter: John Smith>]
->>> Reporter.objects.filter(article=1)
-[<Reporter: John Smith>]
->>> Reporter.objects.filter(article=a)
-[<Reporter: John Smith>]
-
->>> Reporter.objects.filter(article__in=[1,4]).distinct()
-[<Reporter: John Smith>]
->>> Reporter.objects.filter(article__in=[1,a3]).distinct()
-[<Reporter: John Smith>]
->>> Reporter.objects.filter(article__in=[a,a3]).distinct()
-[<Reporter: John Smith>]
-
->>> Reporter.objects.filter(article__headline__startswith='This')
-[<Reporter: John Smith>, <Reporter: John Smith>, <Reporter: John Smith>]
->>> Reporter.objects.filter(article__headline__startswith='This').distinct()
-[<Reporter: John Smith>]
-
-# Counting in the opposite direction works in conjunction with distinct()
->>> Reporter.objects.filter(article__headline__startswith='This').count()
-3
->>> Reporter.objects.filter(article__headline__startswith='This').distinct().count()
-1
-
-# Queries can go round in circles.
->>> Reporter.objects.filter(article__reporter__first_name__startswith='John')
-[<Reporter: John Smith>, <Reporter: John Smith>, <Reporter: John Smith>, <Reporter: John Smith>]
->>> Reporter.objects.filter(article__reporter__first_name__startswith='John').distinct()
-[<Reporter: John Smith>]
->>> Reporter.objects.filter(article__reporter__exact=r).distinct()
-[<Reporter: John Smith>]
-
-# Check that implied __exact also works
->>> Reporter.objects.filter(article__reporter=r).distinct()
-[<Reporter: John Smith>]
-
-# If you delete a reporter, his articles will be deleted.
->>> Article.objects.all()
-[<Article: John's second story>, <Article: Paul's story>, <Article: This is a test>, <Article: This is a test>, <Article: This is a test>]
->>> Reporter.objects.order_by('first_name')
-[<Reporter: John Smith>, <Reporter: Paul Jones>]
->>> r2.delete()
->>> Article.objects.all()
-[<Article: John's second story>, <Article: This is a test>, <Article: This is a test>, <Article: This is a test>]
->>> Reporter.objects.order_by('first_name')
-[<Reporter: John Smith>]
-
-# Deletes using a join in the query
->>> Reporter.objects.filter(article__headline__startswith='This').delete()
->>> Reporter.objects.all()
-[]
->>> Article.objects.all()
-[]
-
-"""}
--- a/thirdparty/google_appengine/lib/django/tests/modeltests/many_to_one_null/models.py	Mon Sep 07 20:26:39 2009 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,124 +0,0 @@
-"""
-16. Many-to-one relationships that can be null
-
-To define a many-to-one relationship that can have a null foreign key, use
-``ForeignKey()`` with ``null=True`` .
-"""
-
-from django.db import models
-
-class Reporter(models.Model):
-    name = models.CharField(maxlength=30)
-
-    def __str__(self):
-        return self.name
-
-class Article(models.Model):
-    headline = models.CharField(maxlength=100)
-    reporter = models.ForeignKey(Reporter, null=True)
-
-    class Meta:
-        ordering = ('headline',)
-
-    def __str__(self):
-        return self.headline
-
-__test__ = {'API_TESTS':"""
-# Create a Reporter.
->>> r = Reporter(name='John Smith')
->>> r.save()
-
-# Create an Article.
->>> a = Article(headline="First", reporter=r)
->>> a.save()
-
->>> a.reporter.id
-1
-
->>> a.reporter
-<Reporter: John Smith>
-
-# Article objects have access to their related Reporter objects.
->>> r = a.reporter
-
-# Create an Article via the Reporter object.
->>> a2 = r.article_set.create(headline="Second")
->>> a2
-<Article: Second>
->>> a2.reporter.id
-1
-
-# Reporter objects have access to their related Article objects.
->>> r.article_set.all()
-[<Article: First>, <Article: Second>]
->>> r.article_set.filter(headline__startswith='Fir')
-[<Article: First>]
->>> r.article_set.count()
-2
-
-# Create an Article with no Reporter by passing "reporter=None".
->>> a3 = Article(headline="Third", reporter=None)
->>> a3.save()
->>> a3.id
-3
->>> print a3.reporter
-None
-
-# Need to reget a3 to refresh the cache
->>> a3 = Article.objects.get(pk=3)
->>> print a3.reporter.id
-Traceback (most recent call last):
-    ...
-AttributeError: 'NoneType' object has no attribute 'id'
-
-# Accessing an article's 'reporter' attribute returns None
-# if the reporter is set to None.
->>> print a3.reporter
-None
-
-# To retrieve the articles with no reporters set, use "reporter__isnull=True".
->>> Article.objects.filter(reporter__isnull=True)
-[<Article: Third>]
-
-# Set the reporter for the Third article
->>> r.article_set.add(a3)
->>> r.article_set.all()
-[<Article: First>, <Article: Second>, <Article: Third>]
-
-# Remove an article from the set, and check that it was removed.
->>> r.article_set.remove(a3)
->>> r.article_set.all()
-[<Article: First>, <Article: Second>]
->>> Article.objects.filter(reporter__isnull=True)
-[<Article: Third>]
-
-# Create another article and reporter
->>> r2 = Reporter(name='Paul Jones')
->>> r2.save()
->>> a4 = r2.article_set.create(headline='Fourth')
->>> r2.article_set.all()
-[<Article: Fourth>]
-
-# Try to remove a4 from a set it does not belong to
->>> r.article_set.remove(a4)
-Traceback (most recent call last):
-...
-DoesNotExist: <Article: Fourth> is not related to <Reporter: John Smith>.
-
->>> r2.article_set.all()
-[<Article: Fourth>]
-
-# Use descriptor assignment to allocate ForeignKey. Null is legal, so
-# existing members of set that are not in the assignment set are set null
->>> r2.article_set = [a2, a3]
->>> r2.article_set.all()
-[<Article: Second>, <Article: Third>]
-
-# Clear the rest of the set
->>> r.article_set.clear()
->>> r.article_set.all()
-[]
->>> Article.objects.filter(reporter__isnull=True)
-[<Article: First>, <Article: Fourth>]
-
-"""}
--- a/thirdparty/google_appengine/lib/django/tests/modeltests/model_forms/models.py	Mon Sep 07 20:26:39 2009 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,464 +0,0 @@
-"""
-36. Generating HTML forms from models
-
-Django provides shortcuts for creating Form objects from a model class and a
-model instance.
-
-The function django.newforms.form_for_model() takes a model class and returns
-a Form that is tied to the model. This Form works just like any other Form,
-with one additional method: save(). The save() method creates an instance
-of the model and returns that newly created instance. It saves the instance to
-the database if save(commit=True), which is default. If you pass
-commit=False, then you'll get the object without committing the changes to the
-database.
-
-The function django.newforms.form_for_instance() takes a model instance and
-returns a Form that is tied to the instance. This form works just like any
-other Form, with one additional method: save(). The save()
-method updates the model instance. It also takes a commit=True parameter.
-
-The function django.newforms.save_instance() takes a bound form instance and a
-model instance and saves the form's clean_data into the instance. It also takes
-a commit=True parameter.
-"""
-
-from django.db import models
-
-class Category(models.Model):
-    name = models.CharField(maxlength=20)
-    url = models.CharField('The URL', maxlength=40)
-
-    def __str__(self):
-        return self.name
-
-class Writer(models.Model):
-    name = models.CharField(maxlength=50, help_text='Use both first and last names.')
-
-    def __str__(self):
-        return self.name
-
-class Article(models.Model):
-    headline = models.CharField(maxlength=50)
-    pub_date = models.DateField()
-    created = models.DateField(editable=False)
-    writer = models.ForeignKey(Writer)
-    article = models.TextField()
-    categories = models.ManyToManyField(Category, blank=True)
-
-    def save(self):
-        import datetime
-        if not self.id:
-            self.created = datetime.date.today()
-        return super(Article, self).save()
-
-    def __str__(self):
-        return self.headline
-
-class PhoneNumber(models.Model):
-    phone = models.PhoneNumberField()
-    description = models.CharField(maxlength=20)
-
-    def __str__(self):
-        return self.phone
-
-__test__ = {'API_TESTS': """
->>> from django.newforms import form_for_model, form_for_instance, save_instance, BaseForm, Form, CharField
->>> import datetime
-
->>> Category.objects.all()
-[]
-
->>> CategoryForm = form_for_model(Category)
->>> f = CategoryForm()
->>> print f
-<tr><th><label for="id_name">Name:</label></th><td><input id="id_name" type="text" name="name" maxlength="20" /></td></tr>
-<tr><th><label for="id_url">The URL:</label></th><td><input id="id_url" type="text" name="url" maxlength="40" /></td></tr>
->>> print f.as_ul()
-<li><label for="id_name">Name:</label> <input id="id_name" type="text" name="name" maxlength="20" /></li>
-<li><label for="id_url">The URL:</label> <input id="id_url" type="text" name="url" maxlength="40" /></li>
->>> print f['name']
-<input id="id_name" type="text" name="name" maxlength="20" />
-
->>> f = CategoryForm(auto_id=False)
->>> print f.as_ul()
-<li>Name: <input type="text" name="name" maxlength="20" /></li>
-<li>The URL: <input type="text" name="url" maxlength="40" /></li>
-
->>> f = CategoryForm({'name': 'Entertainment', 'url': 'entertainment'})
->>> f.is_valid()
-True
->>> f.clean_data
-{'url': u'entertainment', 'name': u'Entertainment'}
->>> obj = f.save()
->>> obj
-<Category: Entertainment>
->>> Category.objects.all()
-[<Category: Entertainment>]
-
->>> f = CategoryForm({'name': "It's a test", 'url': 'test'})
->>> f.is_valid()
-True
->>> f.clean_data
-{'url': u'test', 'name': u"It's a test"}
->>> obj = f.save()
->>> obj
-<Category: It's a test>
->>> Category.objects.all()
-[<Category: Entertainment>, <Category: It's a test>]
-
-If you call save() with commit=False, then it will return an object that
-hasn't yet been saved to the database. In this case, it's up to you to call
-save() on the resulting model instance.
->>> f = CategoryForm({'name': 'Third test', 'url': 'third'})
->>> f.is_valid()
-True
->>> f.clean_data
-{'url': u'third', 'name': u'Third test'}
->>> obj = f.save(commit=False)
->>> obj
-<Category: Third test>
->>> Category.objects.all()
-[<Category: Entertainment>, <Category: It's a test>]
->>> obj.save()
->>> Category.objects.all()
-[<Category: Entertainment>, <Category: It's a test>, <Category: Third test>]
-
-If you call save() with invalid data, you'll get a ValueError.
->>> f = CategoryForm({'name': '', 'url': 'foo'})
->>> f.errors
-{'name': [u'This field is required.']}
->>> f.clean_data
-Traceback (most recent call last):
-...
-AttributeError: 'CategoryForm' object has no attribute 'clean_data'
->>> f.save()
-Traceback (most recent call last):
-...
-ValueError: The Category could not be created because the data didn't validate.
->>> f = CategoryForm({'name': '', 'url': 'foo'})
->>> f.save()
-Traceback (most recent call last):
-...
-ValueError: The Category could not be created because the data didn't validate.
-
-Create a couple of Writers.
->>> w = Writer(name='Mike Royko')
->>> w.save()
->>> w = Writer(name='Bob Woodward')
->>> w.save()
-
-ManyToManyFields are represented by a MultipleChoiceField, and ForeignKeys are
-represented by a ChoiceField.
->>> ArticleForm = form_for_model(Article)
->>> f = ArticleForm(auto_id=False)
->>> print f
-<tr><th>Headline:</th><td><input type="text" name="headline" maxlength="50" /></td></tr>
-<tr><th>Pub date:</th><td><input type="text" name="pub_date" /></td></tr>
-<tr><th>Writer:</th><td><select name="writer">
-<option value="" selected="selected">---------</option>
-<option value="1">Mike Royko</option>
-<option value="2">Bob Woodward</option>
-</select></td></tr>
-<tr><th>Article:</th><td><textarea name="article"></textarea></td></tr>
-<tr><th>Categories:</th><td><select multiple="multiple" name="categories">
-<option value="1">Entertainment</option>
-<option value="2">It&#39;s a test</option>
-<option value="3">Third test</option>
-</select><br /> Hold down "Control", or "Command" on a Mac, to select more than one.</td></tr>
-
-You can pass a custom Form class to form_for_model. Make sure it's a
-subclass of BaseForm, not Form.
->>> class CustomForm(BaseForm):
-...     def say_hello(self):
-...         print 'hello'
->>> CategoryForm = form_for_model(Category, form=CustomForm)
->>> f = CategoryForm()
->>> f.say_hello()
-hello
-
-Use form_for_instance to create a Form from a model instance. The difference
-between this Form and one created via form_for_model is that the object's
-current values are inserted as 'initial' data in each Field.
->>> w = Writer.objects.get(name='Mike Royko')
->>> RoykoForm = form_for_instance(w)
->>> f = RoykoForm(auto_id=False)
->>> print f
-<tr><th>Name:</th><td><input type="text" name="name" value="Mike Royko" maxlength="50" /><br />Use both first and last names.</td></tr>
-
->>> art = Article(headline='Test article', pub_date=datetime.date(1988, 1, 4), writer=w, article='Hello.')
->>> art.save()
->>> art.id
-1
->>> TestArticleForm = form_for_instance(art)
->>> f = TestArticleForm(auto_id=False)
->>> print f.as_ul()
-<li>Headline: <input type="text" name="headline" value="Test article" maxlength="50" /></li>
-<li>Pub date: <input type="text" name="pub_date" value="1988-01-04" /></li>
-<li>Writer: <select name="writer">
-<option value="">---------</option>
-<option value="1" selected="selected">Mike Royko</option>
-<option value="2">Bob Woodward</option>
-</select></li>
-<li>Article: <textarea name="article">Hello.</textarea></li>
-<li>Categories: <select multiple="multiple" name="categories">
-<option value="1">Entertainment</option>
-<option value="2">It&#39;s a test</option>
-<option value="3">Third test</option>
-</select>  Hold down "Control", or "Command" on a Mac, to select more than one.</li>
->>> f = TestArticleForm({'headline': u'New headline', 'pub_date': u'1988-01-04', 'writer': u'1', 'article': 'Hello.'})
->>> f.is_valid()
-True
->>> new_art = f.save()
->>> new_art.id
-1
->>> new_art = Article.objects.get(id=1)
->>> new_art.headline
-'New headline'
-
-Add some categories and test the many-to-many form output.
->>> new_art.categories.all()
-[]
->>> new_art.categories.add(Category.objects.get(name='Entertainment'))
->>> new_art.categories.all()
-[<Category: Entertainment>]
->>> TestArticleForm = form_for_instance(new_art)
->>> f = TestArticleForm(auto_id=False)
->>> print f.as_ul()
-<li>Headline: <input type="text" name="headline" value="New headline" maxlength="50" /></li>
-<li>Pub date: <input type="text" name="pub_date" value="1988-01-04" /></li>
-<li>Writer: <select name="writer">
-<option value="">---------</option>
-<option value="1" selected="selected">Mike Royko</option>
-<option value="2">Bob Woodward</option>
-</select></li>
-<li>Article: <textarea name="article">Hello.</textarea></li>
-<li>Categories: <select multiple="multiple" name="categories">
-<option value="1" selected="selected">Entertainment</option>
-<option value="2">It&#39;s a test</option>
-<option value="3">Third test</option>
-</select>  Hold down "Control", or "Command" on a Mac, to select more than one.</li>
-
->>> f = TestArticleForm({'headline': u'New headline', 'pub_date': u'1988-01-04',
-...     'writer': u'1', 'article': u'Hello.', 'categories': [u'1', u'2']})
->>> new_art = f.save()
->>> new_art.id
-1
->>> new_art = Article.objects.get(id=1)
->>> new_art.categories.all()
-[<Category: Entertainment>, <Category: It's a test>]
-
-Now, submit form data with no categories. This deletes the existing categories.
->>> f = TestArticleForm({'headline': u'New headline', 'pub_date': u'1988-01-04',
-...     'writer': u'1', 'article': u'Hello.'})
->>> new_art = f.save()
->>> new_art.id
-1
->>> new_art = Article.objects.get(id=1)
->>> new_art.categories.all()
-[]
-
-Create a new article, with categories, via the form.
->>> ArticleForm = form_for_model(Article)
->>> f = ArticleForm({'headline': u'The walrus was Paul', 'pub_date': u'1967-11-01',
-...     'writer': u'1', 'article': u'Test.', 'categories': [u'1', u'2']})
->>> new_art = f.save()
->>> new_art.id
-2
->>> new_art = Article.objects.get(id=2)
->>> new_art.categories.all()
-[<Category: Entertainment>, <Category: It's a test>]
-
-Create a new article, with no categories, via the form.
->>> ArticleForm = form_for_model(Article)
->>> f = ArticleForm({'headline': u'The walrus was Paul', 'pub_date': u'1967-11-01',
-...     'writer': u'1', 'article': u'Test.'})
->>> new_art = f.save()
->>> new_art.id
-3
->>> new_art = Article.objects.get(id=3)
->>> new_art.categories.all()
-[]
-
-Here, we define a custom Form. Because it happens to have the same fields as
-the Category model, we can use save_instance() to apply its changes to an
-existing Category instance.
->>> class ShortCategory(Form):
-...     name = CharField(max_length=5)
-...     url = CharField(max_length=3)
->>> cat = Category.objects.get(name='Third test')
->>> cat
-<Category: Third test>
->>> cat.id
-3
->>> sc = ShortCategory({'name': 'Third', 'url': '3rd'})
->>> save_instance(sc, cat)
-<Category: Third>
->>> Category.objects.get(id=3)
-<Category: Third>
-
-Here, we demonstrate that choices for a ForeignKey ChoiceField are determined
-at runtime, based on the data in the database when the form is displayed, not
-the data in the database when the form is instantiated.
->>> ArticleForm = form_for_model(Article)
->>> f = ArticleForm(auto_id=False)
->>> print f.as_ul()
-<li>Headline: <input type="text" name="headline" maxlength="50" /></li>
-<li>Pub date: <input type="text" name="pub_date" /></li>
-<li>Writer: <select name="writer">
-<option value="" selected="selected">---------</option>
-<option value="1">Mike Royko</option>
-<option value="2">Bob Woodward</option>
-</select></li>
-<li>Article: <textarea name="article"></textarea></li>
-<li>Categories: <select multiple="multiple" name="categories">
-<option value="1">Entertainment</option>
-<option value="2">It&#39;s a test</option>
-<option value="3">Third</option>
-</select>  Hold down "Control", or "Command" on a Mac, to select more than one.</li>
->>> Category.objects.create(name='Fourth', url='4th')
-<Category: Fourth>
->>> Writer.objects.create(name='Carl Bernstein')
-<Writer: Carl Bernstein>
->>> print f.as_ul()
-<li>Headline: <input type="text" name="headline" maxlength="50" /></li>
-<li>Pub date: <input type="text" name="pub_date" /></li>
-<li>Writer: <select name="writer">
-<option value="" selected="selected">---------</option>
-<option value="1">Mike Royko</option>
-<option value="2">Bob Woodward</option>
-<option value="3">Carl Bernstein</option>
-</select></li>
-<li>Article: <textarea name="article"></textarea></li>
-<li>Categories: <select multiple="multiple" name="categories">
-<option value="1">Entertainment</option>
-<option value="2">It&#39;s a test</option>
-<option value="3">Third</option>
-<option value="4">Fourth</option>
-</select>  Hold down "Control", or "Command" on a Mac, to select more than one.</li>
-
-# ModelChoiceField ############################################################
-
->>> from django.newforms import ModelChoiceField, ModelMultipleChoiceField
-
->>> f = ModelChoiceField(Category.objects.all())
->>> f.clean('')
-Traceback (most recent call last):
-...
-ValidationError: [u'This field is required.']
->>> f.clean(None)
-Traceback (most recent call last):
-...
-ValidationError: [u'This field is required.']
->>> f.clean(0)
-Traceback (most recent call last):
-...
-ValidationError: [u'Select a valid choice. That choice is not one of the available choices.']
->>> f.clean(3)
-<Category: Third>
->>> f.clean(2)
-<Category: It's a test>
-
-# Add a Category object *after* the ModelChoiceField has already been
-# instantiated. This proves clean() checks the database during clean() rather
-# than caching it at time of instantiation.
->>> Category.objects.create(name='Fifth', url='5th')
-<Category: Fifth>
->>> f.clean(5)
-<Category: Fifth>
-
-# Delete a Category object *after* the ModelChoiceField has already been
-# instantiated. This proves clean() checks the database during clean() rather
-# than caching it at time of instantiation.
->>> Category.objects.get(url='5th').delete()
->>> f.clean(5)
-Traceback (most recent call last):
-...
-ValidationError: [u'Select a valid choice. That choice is not one of the available choices.']
-
->>> f = ModelChoiceField(Category.objects.filter(pk=1), required=False)
->>> print f.clean('')
-None
->>> f.clean('')
->>> f.clean('1')
-<Category: Entertainment>
->>> f.clean('100')
-Traceback (most recent call last):
-...
-ValidationError: [u'Select a valid choice. That choice is not one of the available choices.']
-
-# ModelMultipleChoiceField ####################################################
-
->>> f = ModelMultipleChoiceField(Category.objects.all())
->>> f.clean(None)
-Traceback (most recent call last):
-...
-ValidationError: [u'This field is required.']
->>> f.clean([])
-Traceback (most recent call last):
-...
-ValidationError: [u'This field is required.']
->>> f.clean([1])
-[<Category: Entertainment>]
->>> f.clean([2])
-[<Category: It's a test>]
->>> f.clean(['1'])
-[<Category: Entertainment>]
->>> f.clean(['1', '2'])
-[<Category: Entertainment>, <Category: It's a test>]
->>> f.clean([1, '2'])
-[<Category: Entertainment>, <Category: It's a test>]
->>> f.clean((1, '2'))
-[<Category: Entertainment>, <Category: It's a test>]
->>> f.clean(['100'])
-Traceback (most recent call last):
-...
-ValidationError: [u'Select a valid choice. 100 is not one of the available choices.']
->>> f.clean('hello')
-Traceback (most recent call last):
-...
-ValidationError: [u'Enter a list of values.']
-
-# Add a Category object *after* the ModelChoiceField has already been
-# instantiated. This proves clean() checks the database during clean() rather
-# than caching it at time of instantiation.
->>> Category.objects.create(id=6, name='Sixth', url='6th')
-<Category: Sixth>
->>> f.clean([6])
-[<Category: Sixth>]
-
-# Delete a Category object *after* the ModelChoiceField has already been
-# instantiated. This proves clean() checks the database during clean() rather
-# than caching it at time of instantiation.
->>> Category.objects.get(url='6th').delete()
->>> f.clean([6])
-Traceback (most recent call last):
-...
-ValidationError: [u'Select a valid choice. 6 is not one of the available choices.']
-
->>> f = ModelMultipleChoiceField(Category.objects.all(), required=False)
->>> f.clean([])
-[]
->>> f.clean(())
-[]
->>> f.clean(['10'])
-Traceback (most recent call last):
-...
-ValidationError: [u'Select a valid choice. 10 is not one of the available choices.']
->>> f.clean(['3', '10'])
-Traceback (most recent call last):
-...
-ValidationError: [u'Select a valid choice. 10 is not one of the available choices.']
->>> f.clean(['1', '10'])
-Traceback (most recent call last):
-...
-ValidationError: [u'Select a valid choice. 10 is not one of the available choices.']
-
-# PhoneNumberField ############################################################
-
->>> PhoneNumberForm = form_for_model(PhoneNumber)
->>> f = PhoneNumberForm({'phone': '(312) 555-1212', 'description': 'Assistance'})
->>> f.is_valid()
-True
->>> f.clean_data
-{'phone': u'312-555-1212', 'description': u'Assistance'}
-"""}
--- a/thirdparty/google_appengine/lib/django/tests/modeltests/model_inheritance/models.py	Mon Sep 07 20:26:39 2009 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,53 +0,0 @@
-"""
-XX. Model inheritance
-
-Model inheritance isn't yet supported.
-"""
-
-from django.db import models
-
-class Place(models.Model):
-    name = models.CharField(maxlength=50)
-    address = models.CharField(maxlength=80)
-
-    def __str__(self):
-        return "%s the place" % self.name
-
-class Restaurant(Place):
-    serves_hot_dogs = models.BooleanField()
-    serves_pizza = models.BooleanField()
-
-    def __str__(self):
-        return "%s the restaurant" % self.name
-
-class ItalianRestaurant(Restaurant):
-    serves_gnocchi = models.BooleanField()
-
-    def __str__(self):
-        return "%s the italian restaurant" % self.name
-
-__test__ = {'API_TESTS':"""
-# Make sure Restaurant has the right fields in the right order.
->>> [f.name for f in Restaurant._meta.fields]
-['id', 'name', 'address', 'serves_hot_dogs', 'serves_pizza']
-
-# Make sure ItalianRestaurant has the right fields in the right order.
->>> [f.name for f in ItalianRestaurant._meta.fields]
-['id', 'name', 'address', 'serves_hot_dogs', 'serves_pizza', 'serves_gnocchi']
-
-# Create a couple of Places.
->>> p1 = Place(name='Master Shakes', address='666 W. Jersey')
->>> p1.save()
->>> p2 = Place(name='Ace Hardware', address='1013 N. Ashland')
->>> p2.save()
-
-# Test constructor for Restaurant.
->>> r = Restaurant(name='Demon Dogs', address='944 W. Fullerton', serves_hot_dogs=True, serves_pizza=False)
->>> r.save()
-
-# Test the constructor for ItalianRestaurant.
->>> ir = ItalianRestaurant(name='Ristorante Miron', address='1234 W. Elm', serves_hot_dogs=False, serves_pizza=False, serves_gnocchi=True)
->>> ir.save()
-
-
-"""}
--- a/thirdparty/google_appengine/lib/django/tests/modeltests/mutually_referential/models.py	Mon Sep 07 20:26:39 2009 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,32 +0,0 @@
-"""
-24. Mutually referential many-to-one relationships
-
-To define a many-to-one relationship, use ``ForeignKey()`` .
-"""
-
-from django.db.models import *
-
-class Parent(Model):
-    name = CharField(maxlength=100, core=True)
-    bestchild = ForeignKey("Child", null=True, related_name="favoured_by")
-
-class Child(Model):
-    name = CharField(maxlength=100)
-    parent = ForeignKey(Parent)
-
-__test__ = {'API_TESTS':"""
-# Create a Parent
->>> q = Parent(name='Elizabeth')
->>> q.save()
-
-# Create some children
->>> c = q.child_set.create(name='Charles')
->>> e = q.child_set.create(name='Edward')
-
-# Set the best child
->>> q.bestchild = c
->>> q.save()
-
->>> q.delete()
-
-"""}
\ No newline at end of file
--- a/thirdparty/google_appengine/lib/django/tests/modeltests/one_to_one/models.py	Mon Sep 07 20:26:39 2009 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,168 +0,0 @@
-"""
-10. One-to-one relationships
-
-To define a one-to-one relationship, use ``OneToOneField()``.
-
-In this example, a ``Place`` optionally can be a ``Restaurant``.
-"""
-
-from django.db import models
-
-class Place(models.Model):
-    name = models.CharField(maxlength=50)
-    address = models.CharField(maxlength=80)
-
-    def __str__(self):
-        return "%s the place" % self.name
-
-class Restaurant(models.Model):
-    place = models.OneToOneField(Place)
-    serves_hot_dogs = models.BooleanField()
-    serves_pizza = models.BooleanField()
-
-    def __str__(self):
-        return "%s the restaurant" % self.place.name
-
-class Waiter(models.Model):
-    restaurant = models.ForeignKey(Restaurant)
-    name = models.CharField(maxlength=50)
-
-    def __str__(self):
-        return "%s the waiter at %s" % (self.name, self.restaurant)
-
-class ManualPrimaryKey(models.Model):
-    primary_key = models.CharField(maxlength=10, primary_key=True)
-    name = models.CharField(maxlength = 50)
-
-class RelatedModel(models.Model):
-    link = models.OneToOneField(ManualPrimaryKey)
-    name = models.CharField(maxlength = 50)
-
-__test__ = {'API_TESTS':"""
-# Create a couple of Places.
->>> p1 = Place(name='Demon Dogs', address='944 W. Fullerton')
->>> p1.save()
->>> p2 = Place(name='Ace Hardware', address='1013 N. Ashland')
->>> p2.save()
-
-# Create a Restaurant. Pass the ID of the "parent" object as this object's ID.
->>> r = Restaurant(place=p1, serves_hot_dogs=True, serves_pizza=False)
->>> r.save()
-
-# A Restaurant can access its place.
->>> r.place
-<Place: Demon Dogs the place>
-
-# A Place can access its restaurant, if available.
->>> p1.restaurant
-<Restaurant: Demon Dogs the restaurant>
-
-# p2 doesn't have an associated restaurant.
->>> p2.restaurant
-Traceback (most recent call last):
-    ...
-DoesNotExist: Restaurant matching query does not exist.
-
-# Set the place using assignment notation. Because place is the primary key on Restaurant,
-# the save will create a new restaurant
->>> r.place = p2
->>> r.save()
->>> p2.restaurant
-<Restaurant: Ace Hardware the restaurant>
->>> r.place
-<Place: Ace Hardware the place>
-
-# Set the place back again, using assignment in the reverse direction
-# Need to reget restaurant object first, because the reverse set
-# can't update the existing restaurant instance
->>> p1.restaurant = r
->>> r.save()
->>> p1.restaurant
-<Restaurant: Demon Dogs the restaurant>
-
->>> r = Restaurant.objects.get(pk=1)
->>> r.place
-<Place: Demon Dogs the place>
-
-# Restaurant.objects.all() just returns the Restaurants, not the Places.
-# Note that there are two restaurants - Ace Hardware the Restaurant was created
-# in the call to r.place = p2. This means there are multiple restaurants referencing
-# a single place...
->>> Restaurant.objects.all()
-[<Restaurant: Demon Dogs the restaurant>, <Restaurant: Ace Hardware the restaurant>]
-
-# Place.objects.all() returns all Places, regardless of whether they have
-# Restaurants.
->>> Place.objects.order_by('name')
-[<Place: Ace Hardware the place>, <Place: Demon Dogs the place>]
-
->>> Restaurant.objects.get(place__id__exact=1)
-<Restaurant: Demon Dogs the restaurant>
->>> Restaurant.objects.get(pk=1)
-<Restaurant: Demon Dogs the restaurant>
->>> Restaurant.objects.get(place__exact=1)
-<Restaurant: Demon Dogs the restaurant>
->>> Restaurant.objects.get(place__exact=p1)
-<Restaurant: Demon Dogs the restaurant>
->>> Restaurant.objects.get(place=1)
-<Restaurant: Demon Dogs the restaurant>
->>> Restaurant.objects.get(place=p1)
-<Restaurant: Demon Dogs the restaurant>
->>> Restaurant.objects.get(place__pk=1)
-<Restaurant: Demon Dogs the restaurant>
->>> Restaurant.objects.get(place__name__startswith="Demon")
-<Restaurant: Demon Dogs the restaurant>
-
->>> Place.objects.get(id__exact=1)
-<Place: Demon Dogs the place>
->>> Place.objects.get(pk=1)
-<Place: Demon Dogs the place>
->>> Place.objects.get(restaurant__place__exact=1)
-<Place: Demon Dogs the place>
->>> Place.objects.get(restaurant__place__exact=p1)
-<Place: Demon Dogs the place>
->>> Place.objects.get(restaurant__pk=1)
-<Place: Demon Dogs the place>
->>> Place.objects.get(restaurant=1)
-<Place: Demon Dogs the place>
->>> Place.objects.get(restaurant=r)
-<Place: Demon Dogs the place>
->>> Place.objects.get(restaurant__exact=1)
-<Place: Demon Dogs the place>
->>> Place.objects.get(restaurant__exact=r)
-<Place: Demon Dogs the place>
-
-# Add a Waiter to the Restaurant.
->>> w = r.waiter_set.create(name='Joe')
->>> w.save()
->>> w
-<Waiter: Joe the waiter at Demon Dogs the restaurant>
-
-# Query the waiters
->>> Waiter.objects.filter(restaurant__place__pk=1)
-[<Waiter: Joe the waiter at Demon Dogs the restaurant>]
->>> Waiter.objects.filter(restaurant__place__exact=1)
-[<Waiter: Joe the waiter at Demon Dogs the restaurant>]
->>> Waiter.objects.filter(restaurant__place__exact=p1)
-[<Waiter: Joe the waiter at Demon Dogs the restaurant>]
->>> Waiter.objects.filter(restaurant__pk=1)
-[<Waiter: Joe the waiter at Demon Dogs the restaurant>]
->>> Waiter.objects.filter(id__exact=1)
-[<Waiter: Joe the waiter at Demon Dogs the restaurant>]
->>> Waiter.objects.filter(pk=1)
-[<Waiter: Joe the waiter at Demon Dogs the restaurant>]
->>> Waiter.objects.filter(restaurant=1)
-[<Waiter: Joe the waiter at Demon Dogs the restaurant>]
->>> Waiter.objects.filter(restaurant=r)
-[<Waiter: Joe the waiter at Demon Dogs the restaurant>]
-
-# Delete the restaurant; the waiter should also be removed
->>> r = Restaurant.objects.get(pk=1)
->>> r.delete()
-
-# One-to-one fields still work if you create your own primary key
->>> o1 = ManualPrimaryKey(primary_key="abc123", name="primary")
->>> o1.save()
->>> o2 = RelatedModel(link=o1, name="secondary")
->>> o2.save()
-"""}
--- a/thirdparty/google_appengine/lib/django/tests/modeltests/or_lookups/models.py	Mon Sep 07 20:26:39 2009 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,119 +0,0 @@
-"""
-19. OR lookups
-
-To perform an OR lookup, or a lookup that combines ANDs and ORs,
-combine QuerySet objects using & and | operators.
-
-Alternatively, use positional arguments, and pass one or more expressions
-of clauses using the variable ``django.db.models.Q`` (or any object with
-a get_sql method).
-
-
-"""
-
-from django.db import models
-
-class Article(models.Model):
-    headline = models.CharField(maxlength=50)
-    pub_date = models.DateTimeField()
-
-    class Meta:
-       ordering = ('pub_date',)
-
-    def __str__(self):
-        return self.headline
-
-__test__ = {'API_TESTS':"""
->>> from datetime import datetime
->>> from django.db.models import Q
-
->>> a1 = Article(headline='Hello', pub_date=datetime(2005, 11, 27))
->>> a1.save()
-
->>> a2 = Article(headline='Goodbye', pub_date=datetime(2005, 11, 28))
->>> a2.save()
-
->>> a3 = Article(headline='Hello and goodbye', pub_date=datetime(2005, 11, 29))
->>> a3.save()
-
->>> Article.objects.filter(headline__startswith='Hello') |  Article.objects.filter(headline__startswith='Goodbye')
-[<Article: Hello>, <Article: Goodbye>, <Article: Hello and goodbye>]
-
->>> Article.objects.filter(Q(headline__startswith='Hello') | Q(headline__startswith='Goodbye'))
-[<Article: Hello>, <Article: Goodbye>, <Article: Hello and goodbye>]
-
->>> Article.objects.filter(Q(headline__startswith='Hello') & Q(headline__startswith='Goodbye'))
-[]
-
-# You can shorten this syntax with code like the following,
-# which is especially useful if building the query in stages:
->>> articles = Article.objects.all()
->>> articles.filter(headline__startswith='Hello') & articles.filter(headline__startswith='Goodbye')
-[]
-
->>> articles.filter(headline__startswith='Hello') & articles.filter(headline__contains='bye')
-[<Article: Hello and goodbye>]
-
->>> Article.objects.filter(Q(headline__contains='bye'), headline__startswith='Hello')
-[<Article: Hello and goodbye>]
-
->>> Article.objects.filter(headline__contains='Hello') | Article.objects.filter(headline__contains='bye')
-[<Article: Hello>, <Article: Goodbye>, <Article: Hello and goodbye>]
-
->>> Article.objects.filter(headline__iexact='Hello') | Article.objects.filter(headline__contains='ood')
-[<Article: Hello>, <Article: Goodbye>, <Article: Hello and goodbye>]
-
->>> Article.objects.filter(Q(pk=1) | Q(pk=2))
-[<Article: Hello>, <Article: Goodbye>]
-
->>> Article.objects.filter(Q(pk=1) | Q(pk=2) | Q(pk=3))
-[<Article: Hello>, <Article: Goodbye>, <Article: Hello and goodbye>]
-
-# You could also use "in" to accomplish the same as above.
->>> Article.objects.filter(pk__in=[1,2,3])
-[<Article: Hello>, <Article: Goodbye>, <Article: Hello and goodbye>]
-
->>> Article.objects.filter(pk__in=[1,2,3,4])
-[<Article: Hello>, <Article: Goodbye>, <Article: Hello and goodbye>]
-
-# Passing "in" an empty list returns no results ...
->>> Article.objects.filter(pk__in=[])
-[]
-
-# ... but can return results if we OR it with another query.
->>> Article.objects.filter(Q(pk__in=[]) | Q(headline__icontains='goodbye'))
-[<Article: Goodbye>, <Article: Hello and goodbye>]
-
-# Q arg objects are ANDed
->>> Article.objects.filter(Q(headline__startswith='Hello'), Q(headline__contains='bye'))
-[<Article: Hello and goodbye>]
-
-# Q arg AND order is irrelevant
->>> Article.objects.filter(Q(headline__contains='bye'), headline__startswith='Hello')
-[<Article: Hello and goodbye>]
-
-# Try some arg queries with operations other than get_list
->>> Article.objects.get(Q(headline__startswith='Hello'), Q(headline__contains='bye'))
-<Article: Hello and goodbye>
-
->>> Article.objects.filter(Q(headline__startswith='Hello') | Q(headline__contains='bye')).count()
-3
-
->>> list(Article.objects.filter(Q(headline__startswith='Hello'), Q(headline__contains='bye')).values())
-[{'headline': 'Hello and goodbye', 'pub_date': datetime.datetime(2005, 11, 29, 0, 0), 'id': 3}]
-
->>> Article.objects.filter(Q(headline__startswith='Hello')).in_bulk([1,2])
-{1: <Article: Hello>}
-
-# Demonstrating exclude with a Q object
->>> Article.objects.exclude(Q(headline__startswith='Hello'))
-[<Article: Goodbye>]
-
-# The 'complex_filter' method supports framework features such as
-# 'limit_choices_to' which normally take a single dictionary of lookup arguments
-# but need to support arbitrary queries via Q objects too.
->>> Article.objects.complex_filter({'pk': 1})
-[<Article: Hello>]
->>> Article.objects.complex_filter(Q(pk=1) | Q(pk=2))
-[<Article: Hello>, <Article: Goodbye>]
-"""}
--- a/thirdparty/google_appengine/lib/django/tests/modeltests/ordering/models.py	Mon Sep 07 20:26:39 2009 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,67 +0,0 @@
-"""
-6. Specifying ordering
-
-Specify default ordering for a model using the ``ordering`` attribute, which
-should be a list or tuple of field names. This tells Django how to order the
-results of ``get_list()`` and other similar functions.
-
-If a field name in ``ordering`` starts with a hyphen, that field will be
-ordered in descending order. Otherwise, it'll be ordered in ascending order.
-The special-case field name ``"?"`` specifies random order.
-
-The ordering attribute is not required. If you leave it off, ordering will be
-undefined -- not random, just undefined.
-"""
-
-from django.db import models
-
-class Article(models.Model):
-    headline = models.CharField(maxlength=100)
-    pub_date = models.DateTimeField()
-    class Meta:
-        ordering = ('-pub_date', 'headline')
-
-    def __str__(self):
-        return self.headline
-
-__test__ = {'API_TESTS':"""
-# Create a couple of Articles.
->>> from datetime import datetime
->>> a1 = Article(headline='Article 1', pub_date=datetime(2005, 7, 26))
->>> a1.save()
->>> a2 = Article(headline='Article 2', pub_date=datetime(2005, 7, 27))
->>> a2.save()
->>> a3 = Article(headline='Article 3', pub_date=datetime(2005, 7, 27))
->>> a3.save()
->>> a4 = Article(headline='Article 4', pub_date=datetime(2005, 7, 28))
->>> a4.save()
-
-# By default, Article.objects.all() orders by pub_date descending, then
-# headline ascending.
->>> Article.objects.all()
-[<Article: Article 4>, <Article: Article 2>, <Article: Article 3>, <Article: Article 1>]
-
-# Override ordering with order_by, which is in the same format as the ordering
-# attribute in models.
->>> Article.objects.order_by('headline')
-[<Article: Article 1>, <Article: Article 2>, <Article: Article 3>, <Article: Article 4>]
->>> Article.objects.order_by('pub_date', '-headline')
-[<Article: Article 1>, <Article: Article 3>, <Article: Article 2>, <Article: Article 4>]
-
-# Use the 'stop' part of slicing notation to limit the results.
->>> Article.objects.order_by('headline')[:2]
-[<Article: Article 1>, <Article: Article 2>]
-
-# Use the 'stop' and 'start' parts of slicing notation to offset the result list.
->>> Article.objects.order_by('headline')[1:3]
-[<Article: Article 2>, <Article: Article 3>]
-
-# Getting a single item should work too:
->>> Article.objects.all()[0]
-<Article: Article 4>
-
-# Use '?' to order randomly. (We're using [...] in the output to indicate we
-# don't know what order the output will be in.
->>> Article.objects.order_by('?')
-[...]
-"""}
--- a/thirdparty/google_appengine/lib/django/tests/modeltests/pagination/models.py	Mon Sep 07 20:26:39 2009 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,80 +0,0 @@
-"""
-30. Object pagination
-
-Django provides a framework for paginating a list of objects in a few lines
-of code. This is often useful for dividing search results or long lists of
-objects into easily readable pages.
-"""
-
-from django.db import models
-
-class Article(models.Model):
-    headline = models.CharField(maxlength=100, default='Default headline')
-    pub_date = models.DateTimeField()
-
-    def __str__(self):
-        return self.headline
-
-__test__ = {'API_TESTS':"""
-# prepare a list of objects for pagination
->>> from datetime import datetime
->>> for x in range(1, 10):
-...     a = Article(headline='Article %s' % x, pub_date=datetime(2005, 7, 29))
-...     a.save()
-
-# create a basic paginator, 5 articles per page
->>> from django.core.paginator import ObjectPaginator, InvalidPage
->>> paginator = ObjectPaginator(Article.objects.all(), 5)
-
-# the paginator knows how many hits and pages it contains
->>> paginator.hits
-9
-
->>> paginator.pages
-2
-
-# get the first page (zero-based)
->>> paginator.get_page(0)
-[<Article: Article 1>, <Article: Article 2>, <Article: Article 3>, <Article: Article 4>, <Article: Article 5>]
-
-# get the second page
->>> paginator.get_page(1)
-[<Article: Article 6>, <Article: Article 7>, <Article: Article 8>, <Article: Article 9>]
-
-# does the first page have a next or previous page?
->>> paginator.has_next_page(0)
-True
-
->>> paginator.has_previous_page(0)
-False
-
-# check the second page
->>> paginator.has_next_page(1)
-False
-
->>> paginator.has_previous_page(1)
-True
-
->>> paginator.first_on_page(0)
-1
->>> paginator.first_on_page(1)
-6
->>> paginator.last_on_page(0)
-5
->>> paginator.last_on_page(1)
-9
-
-# Add a few more records to test out the orphans feature.
->>> for x in range(10, 13):
-...     Article(headline="Article %s" % x, pub_date=datetime(2006, 10, 6)).save()
-
-# With orphans set to 3 and 10 items per page, we should get all 12 items on a single page:
->>> paginator = ObjectPaginator(Article.objects.all(), 10, orphans=3)
->>> paginator.pages
-1
-
-# With orphans only set to 1, we should get two pages:
->>> paginator = ObjectPaginator(Article.objects.all(), 10, orphans=1)
->>> paginator.pages
-2
-"""}
--- a/thirdparty/google_appengine/lib/django/tests/modeltests/properties/models.py	Mon Sep 07 20:26:39 2009 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,40 +0,0 @@
-"""
-22. Using properties on models
-
-Use properties on models just like on any other Python object.
-"""
-
-from django.db import models
-
-class Person(models.Model):
-    first_name = models.CharField(maxlength=30)
-    last_name = models.CharField(maxlength=30)
-
-    def _get_full_name(self):
-        return "%s %s" % (self.first_name, self.last_name)
-
-    def _set_full_name(self, combined_name):
-        self.first_name, self.last_name = combined_name.split(' ', 1)
-
-    full_name = property(_get_full_name)
-
-    full_name_2 = property(_get_full_name, _set_full_name)
-
-__test__ = {'API_TESTS':"""
->>> a = Person(first_name='John', last_name='Lennon')
->>> a.save()
->>> a.full_name
-'John Lennon'
-
-# The "full_name" property hasn't provided a "set" method.
->>> a.full_name = 'Paul McCartney'
-Traceback (most recent call last):
-    ...
-AttributeError: can't set attribute
-
-# But "full_name_2" has, and it can be used to initialise the class.
->>> a2 = Person(full_name_2 = 'Paul McCartney')
->>> a2.save()
->>> a2.first_name
-'Paul'
-"""}
--- a/thirdparty/google_appengine/lib/django/tests/modeltests/reserved_names/models.py	Mon Sep 07 20:26:39 2009 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,56 +0,0 @@
-"""
-18. Using SQL reserved names
-
-Need to use a reserved SQL name as a column name or table name? Need to include
-a hyphen in a column or table name? No problem. Django quotes names
-appropriately behind the scenes, so your database won't complain about
-reserved-name usage.
-"""
-
-from django.db import models
-
-class Thing(models.Model):
-    when = models.CharField(maxlength=1, primary_key=True)
-    join = models.CharField(maxlength=1)
-    like = models.CharField(maxlength=1)
-    drop = models.CharField(maxlength=1)
-    alter = models.CharField(maxlength=1)
-    having = models.CharField(maxlength=1)
-    where = models.DateField(maxlength=1)
-    has_hyphen = models.CharField(maxlength=1, db_column='has-hyphen')
-    class Meta:
-       db_table = 'select'
-
-    def __str__(self):
-        return self.when
-
-__test__ = {'API_TESTS':"""
->>> import datetime
->>> day1 = datetime.date(2005, 1, 1)
->>> day2 = datetime.date(2006, 2, 2)
->>> t = Thing(when='a', join='b', like='c', drop='d', alter='e', having='f', where=day1, has_hyphen='h')
->>> t.save()
->>> print t.when
-a
-
->>> u = Thing(when='h', join='i', like='j', drop='k', alter='l', having='m', where=day2)
->>> u.save()
->>> print u.when
-h
-
->>> Thing.objects.order_by('when')
-[<Thing: a>, <Thing: h>]
->>> v = Thing.objects.get(pk='a')
->>> print v.join
-b
->>> print v.where
-2005-01-01
->>> Thing.objects.order_by('select.when')
-[<Thing: a>, <Thing: h>]
-
->>> Thing.objects.dates('where', 'year')
-[datetime.datetime(2005, 1, 1, 0, 0), datetime.datetime(2006, 1, 1, 0, 0)]
-
->>> Thing.objects.filter(where__month=1)
-[<Thing: a>]
-"""}
--- a/thirdparty/google_appengine/lib/django/tests/modeltests/reverse_lookup/models.py	Mon Sep 07 20:26:39 2009 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,59 +0,0 @@
-"""
-25. Reverse lookups
-
-This demonstrates the reverse lookup features of the database API.
-"""
-
-from django.db import models
-
-class User(models.Model):
-    name = models.CharField(maxlength=200)
-
-    def __str__(self):
-        return self.name
-
-class Poll(models.Model):
-    question = models.CharField(maxlength=200)
-    creator = models.ForeignKey(User)
-
-    def __str__(self):
-        return self.question
-
-class Choice(models.Model):
-    name = models.CharField(maxlength=100)
-    poll = models.ForeignKey(Poll, related_name="poll_choice")
-    related_poll = models.ForeignKey(Poll, related_name="related_choice")
-
-    def __str(self):
-        return self.name
-
-__test__ = {'API_TESTS':"""
->>> john = User(name="John Doe")
->>> john.save()
->>> jim = User(name="Jim Bo")
->>> jim.save()
->>> first_poll = Poll(question="What's the first question?", creator=john)
->>> first_poll.save()
->>> second_poll = Poll(question="What's the second question?", creator=jim)
->>> second_poll.save()
->>> new_choice = Choice(poll=first_poll, related_poll=second_poll, name="This is the answer.")
->>> new_choice.save()
-
->>> # Reverse lookups by field name:
->>> User.objects.get(poll__question__exact="What's the first question?")
-<User: John Doe>
->>> User.objects.get(poll__question__exact="What's the second question?")
-<User: Jim Bo>
-
->>> # Reverse lookups by related_name:
->>> Poll.objects.get(poll_choice__name__exact="This is the answer.")
-<Poll: What's the first question?>
->>> Poll.objects.get(related_choice__name__exact="This is the answer.")
-<Poll: What's the second question?>
-
->>> # If a related_name is given you can't use the field name instead:
->>> Poll.objects.get(choice__name__exact="This is the answer")
-Traceback (most recent call last):
-    ...
-TypeError: Cannot resolve keyword 'choice' into field
-"""}
--- a/thirdparty/google_appengine/lib/django/tests/modeltests/save_delete_hooks/models.py	Mon Sep 07 20:26:39 2009 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,42 +0,0 @@
-"""
-13. Adding hooks before/after saving and deleting
-
-To execute arbitrary code around ``save()`` and ``delete()``, just subclass
-the methods.
-"""
-
-from django.db import models
-
-class Person(models.Model):
-    first_name = models.CharField(maxlength=20)
-    last_name = models.CharField(maxlength=20)
-
-    def __str__(self):
-        return "%s %s" % (self.first_name, self.last_name)
-
-    def save(self):
-        print "Before save"
-        super(Person, self).save() # Call the "real" save() method
-        print "After save"
-
-    def delete(self):
-        print "Before deletion"
-        super(Person, self).delete() # Call the "real" delete() method
-        print "After deletion"
-
-__test__ = {'API_TESTS':"""
->>> p1 = Person(first_name='John', last_name='Smith')
->>> p1.save()
-Before save
-After save
-
->>> Person.objects.all()
-[<Person: John Smith>]
-
->>> p1.delete()
-Before deletion
-After deletion
-
->>> Person.objects.all()
-[]
-"""}
--- a/thirdparty/google_appengine/lib/django/tests/modeltests/select_related/models.py	Mon Sep 07 20:26:39 2009 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,152 +0,0 @@
-"""
-40. Tests for select_related()
-
-``select_related()`` follows all relationships and pre-caches any foreign key
-values so that complex trees can be fetched in a single query. However, this
-isn't always a good idea, so the ``depth`` argument control how many "levels"
-the select-related behavior will traverse.
-"""
-
-from django.db import models
-
-# Who remembers high school biology?
-
-class Domain(models.Model):
-    name = models.CharField(maxlength=50)
-    def __str__(self):
-        return self.name
-
-class Kingdom(models.Model):
-    name = models.CharField(maxlength=50)
-    domain = models.ForeignKey(Domain)
-    def __str__(self):
-        return self.name
-
-class Phylum(models.Model):
-    name = models.CharField(maxlength=50)
-    kingdom = models.ForeignKey(Kingdom)
-    def __str__(self):
-        return self.name
-    
-class Klass(models.Model):
-    name = models.CharField(maxlength=50)
-    phylum = models.ForeignKey(Phylum)
-    def __str__(self):
-        return self.name
-    
-class Order(models.Model):
-    name = models.CharField(maxlength=50)
-    klass = models.ForeignKey(Klass)
-    def __str__(self):
-        return self.name
-
-class Family(models.Model):
-    name = models.CharField(maxlength=50)
-    order = models.ForeignKey(Order)
-    def __str__(self):
-        return self.name
-
-class Genus(models.Model):
-    name = models.CharField(maxlength=50)
-    family = models.ForeignKey(Family)
-    def __str__(self):
-        return self.name
-
-class Species(models.Model):
-    name = models.CharField(maxlength=50)
-    genus = models.ForeignKey(Genus)
-    def __str__(self):
-        return self.name
-
-def create_tree(stringtree):
-    """Helper to create a complete tree"""
-    names = stringtree.split()
-    models = [Domain, Kingdom, Phylum, Klass, Order, Family, Genus, Species]
-    assert len(names) == len(models), (names, models)
-    
-    parent = None
-    for name, model in zip(names, models):
-        try:
-            obj = model.objects.get(name=name)
-        except model.DoesNotExist:
-            obj = model(name=name)
-        if parent:
-            setattr(obj, parent.__class__.__name__.lower(), parent)
-        obj.save()
-        parent = obj
-
-__test__ = {'API_TESTS':"""
-
-# Set up.
-# The test runner sets settings.DEBUG to False, but we want to gather queries
-# so we'll set it to True here and reset it at the end of the test suite.
->>> from django.conf import settings
->>> settings.DEBUG = True
-
->>> create_tree("Eukaryota Animalia Anthropoda Insecta Diptera Drosophilidae Drosophila melanogaster")
->>> create_tree("Eukaryota Animalia Chordata Mammalia Primates Hominidae Homo sapiens")
->>> create_tree("Eukaryota Plantae Magnoliophyta Magnoliopsida Fabales Fabaceae Pisum sativum")
->>> create_tree("Eukaryota Fungi Basidiomycota Homobasidiomycatae Agaricales Amanitacae Amanita muscaria")
-
->>> from django import db
-
-# Normally, accessing FKs doesn't fill in related objects:
->>> db.reset_queries()
->>> fly = Species.objects.get(name="melanogaster")
->>> fly.genus.family.order.klass.phylum.kingdom.domain
-<Domain: Eukaryota>
->>> len(db.connection.queries)
-8
-
-# However, a select_related() call will fill in those related objects without any extra queries:
->>> db.reset_queries()
->>> person = Species.objects.select_related().get(name="sapiens")
->>> person.genus.family.order.klass.phylum.kingdom.domain
-<Domain: Eukaryota>
->>> len(db.connection.queries)
-1
-
-# select_related() also of course applies to entire lists, not just items.
-# Without select_related()
->>> db.reset_queries()
->>> world = Species.objects.all()
->>> [o.genus.family for o in world]
-[<Family: Drosophilidae>, <Family: Hominidae>, <Family: Fabaceae>, <Family: Amanitacae>]
->>> len(db.connection.queries)
-9
-
-# With select_related():
->>> db.reset_queries()
->>> world = Species.objects.all().select_related()
->>> [o.genus.family for o in world]
-[<Family: Drosophilidae>, <Family: Hominidae>, <Family: Fabaceae>, <Family: Amanitacae>]
->>> len(db.connection.queries)
-1
-
-# The "depth" argument to select_related() will stop the descent at a particular level:
->>> db.reset_queries()
->>> pea = Species.objects.select_related(depth=1).get(name="sativum")
->>> pea.genus.family.order.klass.phylum.kingdom.domain
-<Domain: Eukaryota>
-
-# Notice: one few query than above because of depth=1
->>> len(db.connection.queries)
-7
-
->>> db.reset_queries()
->>> pea = Species.objects.select_related(depth=5).get(name="sativum")
->>> pea.genus.family.order.klass.phylum.kingdom.domain
-<Domain: Eukaryota>
->>> len(db.connection.queries)
-3
-
->>> db.reset_queries()
->>> world = Species.objects.all().select_related(depth=2)
->>> [o.genus.family.order for o in world]
-[<Order: Diptera>, <Order: Primates>, <Order: Fabales>, <Order: Agaricales>]
->>> len(db.connection.queries)
-5
-
-# Reset DEBUG to where we found it.
->>> settings.DEBUG = False
-"""}
--- a/thirdparty/google_appengine/lib/django/tests/modeltests/serializers/models.py	Mon Sep 07 20:26:39 2009 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,162 +0,0 @@
-"""
-41. Serialization
-
-``django.core.serializers`` provides interfaces to converting Django querysets
-to and from "flat" data (i.e. strings).
-"""
-
-from django.db import models
-
-class Category(models.Model):
-    name = models.CharField(maxlength=20)
-
-    class Meta:
-       ordering = ('name',)
-
-    def __str__(self):
-        return self.name
-
-class Author(models.Model):
-    name = models.CharField(maxlength=20)
-
-    class Meta:
-        ordering = ('name',)
-    
-    def __str__(self):
-        return self.name
-
-class Article(models.Model):
-    author = models.ForeignKey(Author)
-    headline = models.CharField(maxlength=50)
-    pub_date = models.DateTimeField()
-    categories = models.ManyToManyField(Category)
-
-    class Meta:
-       ordering = ('pub_date',)
-
-    def __str__(self):
-        return self.headline
-
-class AuthorProfile(models.Model):
-    author = models.OneToOneField(Author)
-    date_of_birth = models.DateField()
-    
-    def __str__(self):
-        return "Profile of %s" % self.author
-
-__test__ = {'API_TESTS':"""
-# Create some data:
->>> from datetime import datetime
->>> sports = Category(name="Sports")
->>> music = Category(name="Music")
->>> op_ed = Category(name="Op-Ed")
->>> sports.save(); music.save(); op_ed.save()
-
->>> joe = Author(name="Joe")
->>> jane = Author(name="Jane")
->>> joe.save(); jane.save()
-
->>> a1 = Article(
-...     author = jane,
-...     headline = "Poker has no place on ESPN",
-...     pub_date = datetime(2006, 6, 16, 11, 00))
->>> a2 = Article(
-...     author = joe,
-...     headline = "Time to reform copyright",
-...     pub_date = datetime(2006, 6, 16, 13, 00))
->>> a1.save(); a2.save()
->>> a1.categories = [sports, op_ed]
->>> a2.categories = [music, op_ed]
-
-# Serialize a queryset to XML
->>> from django.core import serializers
->>> xml = serializers.serialize("xml", Article.objects.all())
-
-# The output is valid XML
->>> from xml.dom import minidom
->>> dom = minidom.parseString(xml)
-
-# Deserializing has a similar interface, except that special DeserializedObject
-# instances are returned.  This is because data might have changed in the 
-# database since the data was serialized (we'll simulate that below).
->>> for obj in serializers.deserialize("xml", xml):
-...     print obj
-<DeserializedObject: Poker has no place on ESPN>
-<DeserializedObject: Time to reform copyright>
-
-# Deserializing data with different field values doesn't change anything in the
-# database until we call save():
->>> xml = xml.replace("Poker has no place on ESPN", "Poker has no place on television")
->>> objs = list(serializers.deserialize("xml", xml))
-
-# Even those I deserialized, the database hasn't been touched
->>> Article.objects.all()
-[<Article: Poker has no place on ESPN>, <Article: Time to reform copyright>]
-
-# But when I save, the data changes as you might except.
->>> objs[0].save()
->>> Article.objects.all()
-[<Article: Poker has no place on television>, <Article: Time to reform copyright>]
-
-# Django also ships with a built-in JSON serializers
->>> json = serializers.serialize("json", Category.objects.filter(pk=2))
->>> json
-'[{"pk": "2", "model": "serializers.category", "fields": {"name": "Music"}}]'
-
-# You can easily create new objects by deserializing data with an empty PK
-# (It's easier to demo this with JSON...)
->>> new_author_json = '[{"pk": null, "model": "serializers.author", "fields": {"name": "Bill"}}]'
->>> for obj in serializers.deserialize("json", new_author_json):
-...     obj.save()
->>> Author.objects.all()
-[<Author: Bill>, <Author: Jane>, <Author: Joe>]
-
-# All the serializers work the same
->>> json = serializers.serialize("json", Article.objects.all())
->>> for obj in serializers.deserialize("json", json):
-...     print obj
-<DeserializedObject: Poker has no place on television>
-<DeserializedObject: Time to reform copyright>
-
->>> json = json.replace("Poker has no place on television", "Just kidding; I love TV poker")
->>> for obj in serializers.deserialize("json", json):
-...     obj.save()
-
->>> Article.objects.all()
-[<Article: Just kidding; I love TV poker>, <Article: Time to reform copyright>]
-
-# If you use your own primary key field (such as a OneToOneField), 
-# it doesn't appear in the serialized field list - it replaces the
-# pk identifier.
->>> profile = AuthorProfile(author=joe, date_of_birth=datetime(1970,1,1))
->>> profile.save()
-
->>> json = serializers.serialize("json", AuthorProfile.objects.all())
->>> json
-'[{"pk": "1", "model": "serializers.authorprofile", "fields": {"date_of_birth": "1970-01-01"}}]'
-
->>> for obj in serializers.deserialize("json", json):
-...     print obj
-<DeserializedObject: Profile of Joe>
-
-# Objects ids can be referenced before they are defined in the serialization data
-# However, the deserialization process will need to be contained within a transaction
->>> json = '[{"pk": "3", "model": "serializers.article", "fields": {"headline": "Forward references pose no problem", "pub_date": "2006-06-16 15:00:00", "categories": [4, 1], "author": 4}}, {"pk": "4", "model": "serializers.category", "fields": {"name": "Reference"}}, {"pk": "4", "model": "serializers.author", "fields": {"name": "Agnes"}}]'
->>> from django.db import transaction
->>> transaction.enter_transaction_management()
->>> transaction.managed(True)
->>> for obj in serializers.deserialize("json", json):
-...     obj.save()
-
->>> transaction.commit()
->>> transaction.leave_transaction_management()
-
->>> article = Article.objects.get(pk=3)
->>> article
-<Article: Forward references pose no problem>
->>> article.categories.all()
-[<Category: Reference>, <Category: Sports>]
->>> article.author
-<Author: Agnes>
-
-"""}
--- a/thirdparty/google_appengine/lib/django/tests/modeltests/str/models.py	Mon Sep 07 20:26:39 2009 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,31 +0,0 @@
-"""
-2. Adding __str__() to models
-
-Although it's not a strict requirement, each model should have a ``__str__()``
-method to return a "human-readable" representation of the object. Do this not
-only for your own sanity when dealing with the interactive prompt, but also
-because objects' representations are used throughout Django's
-automatically-generated admin.
-"""
-
-from django.db import models
-
-class Article(models.Model):
-    headline = models.CharField(maxlength=100)
-    pub_date = models.DateTimeField()
-
-    def __str__(self):
-        return self.headline
-
-__test__ = {'API_TESTS':"""
-# Create an Article.
->>> from datetime import datetime
->>> a = Article(headline='Area man programs in Python', pub_date=datetime(2005, 7, 28))
->>> a.save()
-
->>> str(a)
-'Area man programs in Python'
-
->>> a
-<Article: Area man programs in Python>
-"""}
--- a/thirdparty/google_appengine/lib/django/tests/modeltests/test_client/fixtures/testdata.json	Mon Sep 07 20:26:39 2009 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,20 +0,0 @@
-[
-    {
-        "pk": "1", 
-        "model": "auth.user", 
-        "fields": {
-            "username": "testclient", 
-            "first_name": "Test", 
-            "last_name": "Client", 
-            "is_active": true, 
-            "is_superuser": false, 
-            "is_staff": false, 
-            "last_login": "2006-12-17 07:03:31", 
-            "groups": [], 
-            "user_permissions": [], 
-            "password": "sha1$6efc0$f93efe9fd7542f25a7be94871ea45aa95de57161", 
-            "email": "testclient@example.com", 
-            "date_joined": "2006-12-17 07:03:31"
-        }
-    }
-]
\ No newline at end of file
--- a/thirdparty/google_appengine/lib/django/tests/modeltests/test_client/models.py	Mon Sep 07 20:26:39 2009 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,172 +0,0 @@
-"""
-38. Testing using the Test Client
-
-The test client is a class that can act like a simple
-browser for testing purposes.
-  
-It allows the user to compose GET and POST requests, and
-obtain the response that the server gave to those requests.
-The server Response objects are annotated with the details
-of the contexts and templates that were rendered during the
-process of serving the request.
-
-Client objects are stateful - they will retain cookie (and
-thus session) details for the lifetime of the Client instance.
-
-This is not intended as a replacement for Twill,Selenium, or
-other browser automation frameworks - it is here to allow 
-testing against the contexts and templates produced by a view, 
-rather than the HTML rendered to the end-user.
-
-"""
-from django.test import Client, TestCase
-
-class ClientTest(TestCase):
-    fixtures = ['testdata.json']
-    
-    def setUp(self):
-        "Set up test environment"
-        self.client = Client()
-        
-    def test_get_view(self):
-        "GET a view"
-        response = self.client.get('/test_client/get_view/')
-        
-        # Check some response details
-        self.assertEqual(response.status_code, 200)
-        self.assertEqual(response.context['var'], 42)
-        self.assertEqual(response.template.name, 'GET Template')
-        self.failUnless('This is a test.' in response.content)
-
-    def test_get_post_view(self):
-        "GET a view that normally expects POSTs"
-        response = self.client.get('/test_client/post_view/', {})
-        
-        # Check some response details
-        self.assertEqual(response.status_code, 200)
-        self.assertEqual(response.template.name, 'Empty GET Template')
-        
-    def test_empty_post(self):
-        "POST an empty dictionary to a view"
-        response = self.client.post('/test_client/post_view/', {})
-        
-        # Check some response details
-        self.assertEqual(response.status_code, 200)
-        self.assertEqual(response.template.name, 'Empty POST Template')
-        
-    def test_post(self):
-        "POST some data to a view"
-        post_data = {
-            'value': 37
-        }
-        response = self.client.post('/test_client/post_view/', post_data)
-        
-        # Check some response details
-        self.assertEqual(response.status_code, 200)
-        self.assertEqual(response.context['data'], '37')
-        self.assertEqual(response.template.name, 'POST Template')
-        self.failUnless('Data received' in response.content)
-        
-    def test_raw_post(self):
-        test_doc = """<?xml version="1.0" encoding="utf-8"?><library><book><title>Blink</title><author>Malcolm Gladwell</author></book></library>"""
-        response = self.client.post("/test_client/raw_post_view/", test_doc,
-                                    content_type="text/xml")
-        self.assertEqual(response.status_code, 200)
-        self.assertEqual(response.template.name, "Book template")
-        self.assertEqual(response.content, "Blink - Malcolm Gladwell")
-
-    def test_redirect(self):
-        "GET a URL that redirects elsewhere"
-        response = self.client.get('/test_client/redirect_view/')
-        
-        # Check that the response was a 302 (redirect)
-        self.assertEqual(response.status_code, 302)
-
-    def test_valid_form(self):
-        "POST valid data to a form"
-        post_data = {
-            'text': 'Hello World',
-            'email': 'foo@example.com',
-            'value': 37,
-            'single': 'b',
-            'multi': ('b','c','e')
-        }
-        response = self.client.post('/test_client/form_view/', post_data)
-        self.assertEqual(response.status_code, 200)
-        self.assertEqual(response.template.name, "Valid POST Template")
-
-    def test_incomplete_data_form(self):
-        "POST incomplete data to a form"
-        post_data = {
-            'text': 'Hello World',
-            'value': 37            
-        }
-        response = self.client.post('/test_client/form_view/', post_data)
-        self.assertEqual(response.status_code, 200)
-        self.assertEqual(response.template.name, "Invalid POST Template")
-
-    def test_form_error(self):
-        "POST erroneous data to a form"
-        post_data = {
-            'text': 'Hello World',
-            'email': 'not an email address',
-            'value': 37,
-            'single': 'b',
-            'multi': ('b','c','e')
-        }
-        response = self.client.post('/test_client/form_view/', post_data)
-        self.assertEqual(response.status_code, 200)
-        self.assertEqual(response.template.name, "Invalid POST Template")
-        
-    def test_unknown_page(self):
-        "GET an invalid URL"
-        response = self.client.get('/test_client/unknown_view/')
-        
-        # Check that the response was a 404
-        self.assertEqual(response.status_code, 404)
-        
-    def test_view_with_login(self):
-        "Request a page that is protected with @login_required"
-        
-        # Get the page without logging in. Should result in 302.
-        response = self.client.get('/test_client/login_protected_view/')
-        self.assertEqual(response.status_code, 302)
-        
-        # Request a page that requires a login
-        response = self.client.login('/test_client/login_protected_view/', 'testclient', 'password')
-        self.failUnless(response)
-        self.assertEqual(response.status_code, 200)
-        self.assertEqual(response.context['user'].username, 'testclient')
-        self.assertEqual(response.template.name, 'Login Template')
-
-    def test_view_with_bad_login(self):
-        "Request a page that is protected with @login, but use bad credentials"
-
-        response = self.client.login('/test_client/login_protected_view/', 'otheruser', 'nopassword')
-        self.failIf(response)
-
-    def test_session_modifying_view(self):
-        "Request a page that modifies the session"
-        # Session value isn't set initially
-        try:
-            self.client.session['tobacconist']
-            self.fail("Shouldn't have a session value")
-        except KeyError:
-            pass
-        
-        from django.contrib.sessions.models import Session
-        response = self.client.post('/test_client/session_view/')
-        
-        # Check that the session was modified
-        self.assertEquals(self.client.session['tobacconist'], 'hovercraft')
-
-    def test_view_with_exception(self):
-        "Request a page that is known to throw an error"
-        self.assertRaises(KeyError, self.client.get, "/test_client/broken_view/")
-        
-        #Try the same assertion, a different way
-        try:
-            self.client.get('/test_client/broken_view/')
-            self.fail('Should raise an error')
-        except KeyError:
-            pass
--- a/thirdparty/google_appengine/lib/django/tests/modeltests/test_client/urls.py	Mon Sep 07 20:26:39 2009 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,13 +0,0 @@
-from django.conf.urls.defaults import *
-import views
-
-urlpatterns = patterns('',
-    (r'^get_view/$', views.get_view),
-    (r'^post_view/$', views.post_view),
-    (r'^raw_post_view/$', views.raw_post_view),
-    (r'^redirect_view/$', views.redirect_view),
-    (r'^form_view/$', views.form_view),
-    (r'^login_protected_view/$', views.login_protected_view),
-    (r'^session_view/$', views.session_view),
-    (r'^broken_view/$', views.broken_view)
-)
--- a/thirdparty/google_appengine/lib/django/tests/modeltests/test_client/views.py	Mon Sep 07 20:26:39 2009 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,102 +0,0 @@
-from xml.dom.minidom import parseString
-from django.template import Context, Template
-from django.http import HttpResponse, HttpResponseRedirect
-from django.contrib.auth.decorators import login_required
-from django.newforms.forms import Form
-from django.newforms import fields
-
-def get_view(request):
-    "A simple view that expects a GET request, and returns a rendered template"
-    t = Template('This is a test. {{ var }} is the value.', name='GET Template')
-    c = Context({'var': 42})
-    
-    return HttpResponse(t.render(c))
-
-def post_view(request):
-    """A view that expects a POST, and returns a different template depending
-    on whether any POST data is available
-    """
-    if request.method == 'POST':
-        if request.POST:
-            t = Template('Data received: {{ data }} is the value.', name='POST Template')
-            c = Context({'data': request.POST['value']})
-        else:
-            t = Template('Viewing POST page.', name='Empty POST Template')
-            c = Context()
-    else:
-        t = Template('Viewing GET page.', name='Empty GET Template')
-        c = Context()
-    
-    return HttpResponse(t.render(c))
-    
-def raw_post_view(request):
-    """A view which expects raw XML to be posted and returns content extracted
-    from the XML"""
-    if request.method == 'POST':
-        root = parseString(request.raw_post_data)
-        first_book = root.firstChild.firstChild
-        title, author = [n.firstChild.nodeValue for n in first_book.childNodes]
-        t = Template("{{ title }} - {{ author }}", name="Book template")
-        c = Context({"title": title, "author": author})
-    else:
-        t = Template("GET request.", name="Book GET template")
-        c = Context()
-
-    return HttpResponse(t.render(c))
-
-def redirect_view(request):
-    "A view that redirects all requests to the GET view"
-    return HttpResponseRedirect('/test_client/get_view/')
-
-TestChoices = (
-    ('a', 'First Choice'),
-    ('b', 'Second Choice'),
-    ('c', 'Third Choice'),
-    ('d', 'Fourth Choice'),
-    ('e', 'Fifth Choice')
-)
-
-class TestForm(Form):
-    text = fields.CharField()
-    email = fields.EmailField()
-    value = fields.IntegerField()
-    single = fields.ChoiceField(choices=TestChoices)
-    multi = fields.MultipleChoiceField(choices=TestChoices)
-    
-def form_view(request):
-    "A view that tests a simple form"
-    if request.method == 'POST':
-        form = TestForm(request.POST)
-        if form.is_valid():
-            t = Template('Valid POST data.', name='Valid POST Template')
-            c = Context()
-        else:
-            t = Template('Invalid POST data. {{ form.errors }}', name='Invalid POST Template')
-            c = Context({'form': form})
-    else:
-        form = TestForm()
-        t = Template('Viewing base form. {{ form }}.', name='Form GET Template')
-        c = Context({'form': form})
-    
-    return HttpResponse(t.render(c))
-        
-def login_protected_view(request):
-    "A simple view that is login protected."
-    t = Template('This is a login protected test. Username is {{ user.username }}.', name='Login Template')
-    c = Context({'user': request.user})
-    
-    return HttpResponse(t.render(c))
-login_protected_view = login_required(login_protected_view)
-
-def session_view(request):
-    "A view that modifies the session"
-    request.session['tobacconist'] = 'hovercraft'
-    
-    t = Template('This is a view that modifies the session.', 
-                 name='Session Modifying View Template')
-    c = Context()
-    return HttpResponse(t.render(c))
-
-def broken_view(request):
-    """A view which just raises an exception, simulating a broken view."""
-    raise KeyError("Oops! Looks like you wrote some bad code.")
--- a/thirdparty/google_appengine/lib/django/tests/modeltests/transactions/models.py	Mon Sep 07 20:26:39 2009 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,99 +0,0 @@
-"""
-15. Transactions
-
-Django handles transactions in three different ways. The default is to commit
-each transaction upon a write, but you can decorate a function to get
-commit-on-success behavior. Alternatively, you can manage the transaction
-manually.
-"""
-
-from django.db import models
-
-class Reporter(models.Model):
-    first_name = models.CharField(maxlength=30)
-    last_name = models.CharField(maxlength=30)
-    email = models.EmailField()
-
-    def __str__(self):
-        return "%s %s" % (self.first_name, self.last_name)
-
-__test__ = {'API_TESTS':"""
->>> from django.db import connection, transaction
-"""}
-
-from django.conf import settings
-
-building_docs = getattr(settings, 'BUILDING_DOCS', False)
-
-if building_docs or settings.DATABASE_ENGINE != 'mysql':
-    __test__['API_TESTS'] += """
-# the default behavior is to autocommit after each save() action
->>> def create_a_reporter_then_fail(first, last):
-...     a = Reporter(first_name=first, last_name=last)
-...     a.save()
-...     raise Exception("I meant to do that")
-...
->>> create_a_reporter_then_fail("Alice", "Smith")
-Traceback (most recent call last):
-    ...
-Exception: I meant to do that
-
-# The object created before the exception still exists
->>> Reporter.objects.all()
-[<Reporter: Alice Smith>]
-
-# the autocommit decorator works exactly the same as the default behavior
->>> autocomitted_create_then_fail = transaction.autocommit(create_a_reporter_then_fail)
->>> autocomitted_create_then_fail("Ben", "Jones")
-Traceback (most recent call last):
-    ...
-Exception: I meant to do that
-
-# Same behavior as before
->>> Reporter.objects.all()
-[<Reporter: Alice Smith>, <Reporter: Ben Jones>]
-
-# With the commit_on_success decorator, the transaction is only comitted if the
-# function doesn't throw an exception
->>> committed_on_success = transaction.commit_on_success(create_a_reporter_then_fail)
->>> committed_on_success("Carol", "Doe")
-Traceback (most recent call last):
-    ...
-Exception: I meant to do that
-
-# This time the object never got saved
->>> Reporter.objects.all()
-[<Reporter: Alice Smith>, <Reporter: Ben Jones>]
-
-# If there aren't any exceptions, the data will get saved
->>> def remove_a_reporter():
-...     r = Reporter.objects.get(first_name="Alice")
-...     r.delete()
-...
->>> remove_comitted_on_success = transaction.commit_on_success(remove_a_reporter)
->>> remove_comitted_on_success()
->>> Reporter.objects.all()
-[<Reporter: Ben Jones>]
-
-# You can manually manage transactions if you really want to, but you
-# have to remember to commit/rollback
->>> def manually_managed():
-...     r = Reporter(first_name="Carol", last_name="Doe")
-...     r.save()
-...     transaction.commit()
->>> manually_managed = transaction.commit_manually(manually_managed)
->>> manually_managed()
->>> Reporter.objects.all()
-[<Reporter: Ben Jones>, <Reporter: Carol Doe>]
-
-# If you forget, you'll get bad errors
->>> def manually_managed_mistake():
-...     r = Reporter(first_name="David", last_name="Davidson")
-...     r.save()
-...     # oops, I forgot to commit/rollback!
->>> manually_managed_mistake = transaction.commit_manually(manually_managed_mistake)
->>> manually_managed_mistake()
-Traceback (most recent call last):
-    ...
-TransactionManagementError: Transaction managed block ended with pending COMMIT/ROLLBACK
-"""
\ No newline at end of file
--- a/thirdparty/google_appengine/lib/django/tests/modeltests/validation/models.py	Mon Sep 07 20:26:39 2009 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,153 +0,0 @@
-"""
-31. Validation
-
-This is an experimental feature!
-
-Each model instance has a validate() method that returns a dictionary of
-validation errors in the instance's fields. This method has a side effect
-of converting each field to its appropriate Python data type.
-"""
-
-from django.db import models
-
-class Person(models.Model):
-    is_child = models.BooleanField()
-    name = models.CharField(maxlength=20)
-    birthdate = models.DateField()
-    favorite_moment = models.DateTimeField()
-    email = models.EmailField()
-
-    def __str__(self):
-        return self.name
-
-__test__ = {'API_TESTS':"""
-
->>> import datetime
->>> valid_params = {
-...     'is_child': True,
-...     'name': 'John',
-...     'birthdate': datetime.date(2000, 5, 3),
-...     'favorite_moment': datetime.datetime(2002, 4, 3, 13, 23),
-...     'email': 'john@example.com'
-... }
->>> p = Person(**valid_params)
->>> p.validate()
-{}
-
->>> p = Person(**dict(valid_params, id='23'))
->>> p.validate()
-{}
->>> p.id
-23
-
->>> p = Person(**dict(valid_params, id='foo'))
->>> p.validate()
-{'id': ['This value must be an integer.']}
-
->>> p = Person(**dict(valid_params, id=None))
->>> p.validate()
-{}
->>> repr(p.id)
-'None'
-
->>> p = Person(**dict(valid_params, is_child='t'))
->>> p.validate()
-{}
->>> p.is_child
-True
-
->>> p = Person(**dict(valid_params, is_child='f'))
->>> p.validate()
-{}
->>> p.is_child
-False
-
->>> p = Person(**dict(valid_params, is_child=True))
->>> p.validate()
-{}
->>> p.is_child
-True
-
->>> p = Person(**dict(valid_params, is_child=False))
->>> p.validate()
-{}
->>> p.is_child
-False
-
->>> p = Person(**dict(valid_params, is_child='foo'))
->>> p.validate()
-{'is_child': ['This value must be either True or False.']}
-
->>> p = Person(**dict(valid_params, name=u'Jose'))
->>> p.validate()
-{}
->>> p.name
-u'Jose'
-
->>> p = Person(**dict(valid_params, name=227))
->>> p.validate()
-{}
->>> p.name
-'227'
-
->>> p = Person(**dict(valid_params, birthdate=datetime.date(2000, 5, 3)))
->>> p.validate()
-{}
->>> p.birthdate
-datetime.date(2000, 5, 3)
-
->>> p = Person(**dict(valid_params, birthdate=datetime.datetime(2000, 5, 3)))
->>> p.validate()
-{}
->>> p.birthdate
-datetime.date(2000, 5, 3)
-
->>> p = Person(**dict(valid_params, birthdate='2000-05-03'))
->>> p.validate()
-{}
->>> p.birthdate
-datetime.date(2000, 5, 3)
-
->>> p = Person(**dict(valid_params, birthdate='2000-5-3'))
->>> p.validate()
-{}
->>> p.birthdate
-datetime.date(2000, 5, 3)
-
->>> p = Person(**dict(valid_params, birthdate='foo'))
->>> p.validate()
-{'birthdate': ['Enter a valid date in YYYY-MM-DD format.']}
-
->>> p = Person(**dict(valid_params, favorite_moment=datetime.datetime(2002, 4, 3, 13, 23)))
->>> p.validate()
-{}
->>> p.favorite_moment
-datetime.datetime(2002, 4, 3, 13, 23)
-
->>> p = Person(**dict(valid_params, favorite_moment=datetime.datetime(2002, 4, 3)))
->>> p.validate()
-{}
->>> p.favorite_moment
-datetime.datetime(2002, 4, 3, 0, 0)
-
->>> p = Person(**dict(valid_params, email='john@example.com'))
->>> p.validate()
-{}
->>> p.email
-'john@example.com'
-
->>> p = Person(**dict(valid_params, email=u'john@example.com'))
->>> p.validate()
-{}
->>> p.email
-u'john@example.com'
-
->>> p = Person(**dict(valid_params, email=22))
->>> p.validate()
-{'email': ['Enter a valid e-mail address.']}
-
-# Make sure that Date and DateTime return validation errors and don't raise Python errors.
->>> Person(name='John Doe', is_child=True, email='abc@def.com').validate()
-{'favorite_moment': ['This field is required.'], 'birthdate': ['This field is required.']}
-
-"""}
--- a/thirdparty/google_appengine/lib/django/tests/regressiontests/bug639/models.py	Mon Sep 07 20:26:39 2009 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,16 +0,0 @@
-import tempfile
-from django.db import models
-
-class Photo(models.Model):
-    title = models.CharField(maxlength=30)
-    image = models.FileField(upload_to=tempfile.gettempdir())
-    
-    # Support code for the tests; this keeps track of how many times save() gets
-    # called on each instance.
-    def __init__(self, *args, **kwargs):
-       super(Photo, self).__init__(*args, **kwargs)
-       self._savecount = 0
-    
-    def save(self):
-        super(Photo, self).save()
-        self._savecount +=1
\ No newline at end of file
Binary file thirdparty/google_appengine/lib/django/tests/regressiontests/bug639/test.jpg has changed
--- a/thirdparty/google_appengine/lib/django/tests/regressiontests/bug639/tests.py	Mon Sep 07 20:26:39 2009 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,42 +0,0 @@
-"""
-Tests for file field behavior, and specifically #639, in which Model.save() gets
-called *again* for each FileField. This test will fail if calling an
-auto-manipulator's save() method causes Model.save() to be called more than once.
-"""
-
-import os
-import unittest
-from regressiontests.bug639.models import Photo
-from django.http import QueryDict
-from django.utils.datastructures import MultiValueDict
-
-class Bug639Test(unittest.TestCase):
-        
-    def testBug639(self):
-        """
-        Simulate a file upload and check how many times Model.save() gets called.
-        """
-        # Grab an image for testing
-        img = open(os.path.join(os.path.dirname(__file__), "test.jpg"), "rb").read()
-        
-        # Fake a request query dict with the file
-        qd = QueryDict("title=Testing&image=", mutable=True)
-        qd["image_file"] = {
-            "filename" : "test.jpg",
-            "content-type" : "image/jpeg",
-            "content" : img
-        }
-        
-        manip = Photo.AddManipulator()
-        manip.do_html2python(qd)
-        p = manip.save(qd)
-        
-        # Check the savecount stored on the object (see the model)
-        self.assertEqual(p._savecount, 1)
-        
-    def tearDown(self):
-        """
-        Make sure to delete the "uploaded" file to avoid clogging /tmp.
-        """
-        p = Photo.objects.get()
-        os.unlink(p.get_image_filename())
\ No newline at end of file
--- a/thirdparty/google_appengine/lib/django/tests/regressiontests/cache/tests.py	Mon Sep 07 20:26:39 2009 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,71 +0,0 @@
-# Unit tests for cache framework
-# Uses whatever cache backend is set in the test settings file.
-
-from django.core.cache import cache
-import time, unittest
-
-# functions/classes for complex data type tests        
-def f():
-    return 42
-class C:
-    def m(n):
-        return 24
-
-class Cache(unittest.TestCase):
-    def test_simple(self):
-        # simple set/get
-        cache.set("key", "value")
-        self.assertEqual(cache.get("key"), "value")
-
-    def test_non_existent(self):
-        # get with non-existent keys
-        self.assertEqual(cache.get("does not exist"), None)
-        self.assertEqual(cache.get("does not exist", "bang!"), "bang!")
-
-    def test_get_many(self):
-        # get_many
-        cache.set('a', 'a')
-        cache.set('b', 'b')
-        cache.set('c', 'c')
-        cache.set('d', 'd')
-        self.assertEqual(cache.get_many(['a', 'c', 'd']), {'a' : 'a', 'c' : 'c', 'd' : 'd'})
-        self.assertEqual(cache.get_many(['a', 'b', 'e']), {'a' : 'a', 'b' : 'b'})
-
-    def test_delete(self):
-        # delete
-        cache.set("key1", "spam")
-        cache.set("key2", "eggs")
-        self.assertEqual(cache.get("key1"), "spam")
-        cache.delete("key1")
-        self.assertEqual(cache.get("key1"), None)
-        self.assertEqual(cache.get("key2"), "eggs")
-
-    def test_has_key(self):
-        # has_key
-        cache.set("hello", "goodbye")
-        self.assertEqual(cache.has_key("hello"), True)
-        self.assertEqual(cache.has_key("goodbye"), False)
-
-    def test_data_types(self):
-        # test data types
-        stuff = {
-            'string'    : 'this is a string',
-            'int'       : 42,
-            'list'      : [1, 2, 3, 4],
-            'tuple'     : (1, 2, 3, 4),
-            'dict'      : {'A': 1, 'B' : 2},
-            'function'  : f,
-            'class'     : C,
-        }
-        for (key, value) in stuff.items():
-            cache.set(key, value)
-            self.assertEqual(cache.get(key), value)
-    
-    def test_expiration(self):
-        # expiration
-        cache.set('expire', 'very quickly', 1)
-        time.sleep(2)
-        self.assertEqual(cache.get("expire"), None)
-
-if __name__ == '__main__':
-    unittest.main()
\ No newline at end of file
--- a/thirdparty/google_appengine/lib/django/tests/regressiontests/datastructures/tests.py	Mon Sep 07 20:26:39 2009 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,65 +0,0 @@
-"""
-# Tests for stuff in django.utils.datastructures.
-
->>> from django.utils.datastructures import *
-
-### MergeDict #################################################################
-
->>> d1 = {'chris':'cool','camri':'cute','cotton':'adorable','tulip':'snuggable', 'twoofme':'firstone'}
->>> d2 = {'chris2':'cool2','camri2':'cute2','cotton2':'adorable2','tulip2':'snuggable2'}
->>> d3 = {'chris3':'cool3','camri3':'cute3','cotton3':'adorable3','tulip3':'snuggable3'}
->>> d4 = {'twoofme':'secondone'}
->>> md = MergeDict( d1,d2,d3 )
->>> md['chris']
-'cool'
->>> md['camri']
-'cute'
->>> md['twoofme']
-'firstone'
->>> md2 = md.copy()
->>> md2['chris']
-'cool'
-
-### MultiValueDict ##########################################################
-
->>> d = MultiValueDict({'name': ['Adrian', 'Simon'], 'position': ['Developer']})
->>> d['name']
-'Simon'
->>> d.getlist('name')
-['Adrian', 'Simon']
->>> d.get('lastname', 'nonexistent')
-'nonexistent'
->>> d.setlist('lastname', ['Holovaty', 'Willison'])
-
-### SortedDict #################################################################
-
->>> d = SortedDict()
->>> d['one'] = 'one'
->>> d['two'] = 'two'
->>> d['three'] = 'three'
->>> d['one']
-'one'
->>> d['two']
-'two'
->>> d['three']
-'three'
->>> d.keys()
-['one', 'two', 'three']
->>> d.values()
-['one', 'two', 'three']
->>> d['one'] = 'not one'
->>> d['one']
-'not one'
->>> d.keys() == d.copy().keys()
-True
-
-### DotExpandedDict ############################################################
-
->>> d = DotExpandedDict({'person.1.firstname': ['Simon'], 'person.1.lastname': ['Willison'], 'person.2.firstname': ['Adrian'], 'person.2.lastname': ['Holovaty']})
->>> d['person']['1']['lastname']
-['Willison']
->>> d['person']['2']['lastname']
-['Holovaty']
->>> d['person']['2']['firstname']
-['Adrian']
-"""
--- a/thirdparty/google_appengine/lib/django/tests/regressiontests/dateformat/tests.py	Mon Sep 07 20:26:39 2009 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,86 +0,0 @@
-r"""
->>> format(my_birthday, '')
-''
->>> format(my_birthday, 'a')
-'p.m.'
->>> format(my_birthday, 'A')
-'PM'
->>> format(my_birthday, 'd')
-'08'
->>> format(my_birthday, 'j')
-'8'
->>> format(my_birthday, 'l')
-'Sunday'
->>> format(my_birthday, 'L')
-'False'
->>> format(my_birthday, 'm')
-'07'
->>> format(my_birthday, 'M')
-'Jul'
->>> format(my_birthday, 'b')
-'jul'
->>> format(my_birthday, 'n')
-'7'
->>> format(my_birthday, 'N')
-'July'
->>> no_tz or format(my_birthday, 'O') == '+0100'
-True
->>> format(my_birthday, 'P')
-'10 p.m.'
->>> no_tz or format(my_birthday, 'r') == 'Sun, 8 Jul 1979 22:00:00 +0100'
-True
->>> format(my_birthday, 's')
-'00'
->>> format(my_birthday, 'S')
-'th'
->>> format(my_birthday, 't')
-'31'
->>> no_tz or format(my_birthday, 'T') == 'CET'
-True
->>> no_tz or format(my_birthday, 'U') == '300531600'
-True
->>> format(my_birthday, 'w')
-'0'
->>> format(my_birthday, 'W')
-'27'
->>> format(my_birthday, 'y')
-'79'
->>> format(my_birthday, 'Y')
-'1979'
->>> format(my_birthday, 'z')
-'189'
->>> no_tz or format(my_birthday, 'Z') == '3600'
-True
-
->>> no_tz or format(summertime, 'I') == '1'
-True
->>> no_tz or format(summertime, 'O') == '+0200'
-True
->>> no_tz or format(wintertime, 'I') == '0'
-True
->>> no_tz or format(wintertime, 'O') == '+0100'
-True
-
->>> format(my_birthday, r'Y z \C\E\T')
-'1979 189 CET'
-
->>> format(my_birthday, r'jS o\f F')
-'8th of July'
-"""
-
-from django.utils import dateformat, translation
-import datetime, os, time
-
-format = dateformat.format
-os.environ['TZ'] = 'Europe/Copenhagen'
-translation.activate('en-us')
-
-try:
-    time.tzset()
-    no_tz = False
-except AttributeError:
-    no_tz = True
-
-my_birthday = datetime.datetime(1979, 7, 8, 22, 00)
-summertime = datetime.datetime(2005, 10, 30, 1, 00)
-wintertime = datetime.datetime(2005, 10, 30, 4, 00)
--- a/thirdparty/google_appengine/lib/django/tests/regressiontests/db_typecasts/tests.py	Mon Sep 07 20:26:39 2009 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,56 +0,0 @@
-# Unit tests for typecast functions in django.db.backends.util
-
-from django.db.backends import util as typecasts
-import datetime, unittest
-
-TEST_CASES = {
-    'typecast_date': (
-        ('', None),
-        (None, None),
-        ('2005-08-11', datetime.date(2005, 8, 11)),
-        ('1990-01-01', datetime.date(1990, 1, 1)),
-    ),
-    'typecast_time': (
-        ('', None),
-        (None, None),
-        ('0:00:00', datetime.time(0, 0)),
-        ('0:30:00', datetime.time(0, 30)),
-        ('8:50:00', datetime.time(8, 50)),
-        ('08:50:00', datetime.time(8, 50)),
-        ('12:00:00', datetime.time(12, 00)),
-        ('12:30:00', datetime.time(12, 30)),
-        ('13:00:00', datetime.time(13, 00)),
-        ('23:59:00', datetime.time(23, 59)),
-        ('00:00:12', datetime.time(0, 0, 12)),
-        ('00:00:12.5', datetime.time(0, 0, 12, 500000)),
-        ('7:22:13.312', datetime.time(7, 22, 13, 312000)),
-    ),
-    'typecast_timestamp': (
-        ('', None),
-        (None, None),
-        ('2005-08-11 0:00:00', datetime.datetime(2005, 8, 11)),
-        ('2005-08-11 0:30:00', datetime.datetime(2005, 8, 11, 0, 30)),
-        ('2005-08-11 8:50:30', datetime.datetime(2005, 8, 11, 8, 50, 30)),
-        ('2005-08-11 8:50:30.123', datetime.datetime(2005, 8, 11, 8, 50, 30, 123000)),
-        ('2005-08-11 8:50:30.9', datetime.datetime(2005, 8, 11, 8, 50, 30, 900000)),
-        ('2005-08-11 8:50:30.312-05', datetime.datetime(2005, 8, 11, 8, 50, 30, 312000)),
-        ('2005-08-11 8:50:30.312+02', datetime.datetime(2005, 8, 11, 8, 50, 30, 312000)),
-    ),
-    'typecast_boolean': (
-        (None, None),
-        ('', False),
-        ('t', True),
-        ('f', False),
-        ('x', False),
-    ),
-}
-
-class DBTypeCasts(unittest.TestCase):
-    def test_typeCasts(self):
-        for k, v in TEST_CASES.items():
-            for inpt, expected in v:
-                got = getattr(typecasts, k)(inpt)
-                assert got == expected, "In %s: %r doesn't match %r. Got %r instead." % (k, inpt, expected, got)
-
-if __name__ == '__main__':
-    unittest.main()
\ No newline at end of file
--- a/thirdparty/google_appengine/lib/django/tests/regressiontests/defaultfilters/tests.py	Mon Sep 07 20:26:39 2009 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,452 +0,0 @@
-r"""
->>> floatformat(7.7)
-'7.7'
->>> floatformat(7.0)
-'7'
->>> floatformat(0.7)
-'0.7'
->>> floatformat(0.07)
-'0.1'
->>> floatformat(0.007)
-'0.0'
->>> floatformat(0.0)
-'0'
->>> floatformat(7.7,3)
-'7.700'
->>> floatformat(6.000000,3)
-'6.000'
->>> floatformat(13.1031,-3)
-'13.103'
->>> floatformat(11.1197, -2)
-'11.12'
->>> floatformat(11.0000, -2)
-'11'
->>> floatformat(11.000001, -2)
-'11.00'
->>> floatformat(8.2798, 3)
-'8.280'
->>> floatformat('foo')
-''
->>> floatformat(13.1031, 'bar')
-'13.1031'
->>> floatformat('foo', 'bar')
-''
-
->>> addslashes('"double quotes" and \'single quotes\'')
-'\\"double quotes\\" and \\\'single quotes\\\''
-
->>> addslashes(r'\ : backslashes, too')
-'\\\\ : backslashes, too'
-
->>> capfirst('hello world')
-'Hello world'
-
->>> fix_ampersands('Jack & Jill & Jeroboam')
-'Jack &amp; Jill &amp; Jeroboam'
-
->>> linenumbers('line 1\nline 2')
-'1. line 1\n2. line 2'
-
->>> linenumbers('\n'.join(['x'] * 10))
-'01. x\n02. x\n03. x\n04. x\n05. x\n06. x\n07. x\n08. x\n09. x\n10. x'
-
->>> lower('TEST')
-'test'
-
->>> lower(u'\xcb') # uppercase E umlaut
-u'\xeb'
-
->>> make_list('abc')
-['a', 'b', 'c']
-
->>> make_list(1234)
-['1', '2', '3', '4']
-
->>> slugify(' Jack & Jill like numbers 1,2,3 and 4 and silly characters ?%.$!/')
-'jack-jill-like-numbers-123-and-4-and-silly-characters'
-
->>> stringformat(1, '03d')
-'001'
-
->>> stringformat(1, 'z')
-''
-
->>> title('a nice title, isn\'t it?')
-"A Nice Title, Isn't It?"
-
-
->>> truncatewords('A sentence with a few words in it', 1)
-'A ...'
-
->>> truncatewords('A sentence with a few words in it', 5)
-'A sentence with a few ...'
-
->>> truncatewords('A sentence with a few words in it', 100)
-'A sentence with a few words in it'
-
->>> truncatewords('A sentence with a few words in it', 'not a number')
-'A sentence with a few words in it'
-
->>> truncatewords_html('<p>one <a href="#">two - three <br>four</a> five</p>', 0) 
-''
- 
->>> truncatewords_html('<p>one <a href="#">two - three <br>four</a> five</p>', 2) 
-'<p>one <a href="#">two ...</a></p>'
- 
->>> truncatewords_html('<p>one <a href="#">two - three <br>four</a> five</p>', 4) 
-'<p>one <a href="#">two - three <br>four ...</a></p>'
-
->>> truncatewords_html('<p>one <a href="#">two - three <br>four</a> five</p>', 5) 
-'<p>one <a href="#">two - three <br>four</a> five</p>'
-
->>> truncatewords_html('<p>one <a href="#">two - three <br>four</a> five</p>', 100) 
-'<p>one <a href="#">two - three <br>four</a> five</p>'
-
->>> upper('Mixed case input')
-'MIXED CASE INPUT'
-
->>> upper(u'\xeb') # lowercase e umlaut
-u'\xcb'
-
-
->>> urlencode('jack & jill')
-'jack%20%26%20jill'
->>> urlencode(1)
-'1'
-
-
->>> urlizetrunc('http://short.com/', 20)
-'<a href="http://short.com/" rel="nofollow">http://short.com/</a>'
-
->>> urlizetrunc('http://www.google.co.uk/search?hl=en&q=some+long+url&btnG=Search&meta=', 20)
-'<a href="http://www.google.co.uk/search?hl=en&q=some+long+url&btnG=Search&meta=" rel="nofollow">http://www.google.co...</a>'
-
->>> wordcount('')
-0
-
->>> wordcount('oneword')
-1
-
->>> wordcount('lots of words')
-3
-
->>> wordwrap('this is a long paragraph of text that really needs to be wrapped I\'m afraid', 14)
-"this is a long\nparagraph of\ntext that\nreally needs\nto be wrapped\nI'm afraid"
-
->>> wordwrap('this is a short paragraph of text.\n  But this line should be indented',14)
-'this is a\nshort\nparagraph of\ntext.\n  But this\nline should be\nindented'
-
->>> wordwrap('this is a short paragraph of text.\n  But this line should be indented',15)
-'this is a short\nparagraph of\ntext.\n  But this line\nshould be\nindented'
-
->>> ljust('test', 10)
-'test      '
-
->>> ljust('test', 3)
-'test'
-
->>> rjust('test', 10)
-'      test'
-
->>> rjust('test', 3)
-'test'
-
->>> center('test', 6)
-' test '
-
->>> cut('a string to be mangled', 'a')
-' string to be mngled'
-
->>> cut('a string to be mangled', 'ng')
-'a stri to be maled'
-
->>> cut('a string to be mangled', 'strings')
-'a string to be mangled'
-
->>> escape('<some html & special characters > here')
-'&lt;some html &amp; special characters &gt; here'
-
->>> linebreaks('line 1')
-'<p>line 1</p>'
-
->>> linebreaks('line 1\nline 2')
-'<p>line 1<br />line 2</p>'
-
->>> removetags('some <b>html</b> with <script>alert("You smell")</script> disallowed <img /> tags', 'script img')
-'some <b>html</b> with alert("You smell") disallowed  tags'
-
->>> striptags('some <b>html</b> with <script>alert("You smell")</script> disallowed <img /> tags')
-'some html with alert("You smell") disallowed  tags'
-
->>> dictsort([{'age': 23, 'name': 'Barbara-Ann'},
-...           {'age': 63, 'name': 'Ra Ra Rasputin'},
-...           {'name': 'Jonny B Goode', 'age': 18}], 'age')
-[{'age': 18, 'name': 'Jonny B Goode'}, {'age': 23, 'name': 'Barbara-Ann'}, {'age': 63, 'name': 'Ra Ra Rasputin'}]
-
->>> dictsortreversed([{'age': 23, 'name': 'Barbara-Ann'},
-...           {'age': 63, 'name': 'Ra Ra Rasputin'},
-...           {'name': 'Jonny B Goode', 'age': 18}], 'age')
-[{'age': 63, 'name': 'Ra Ra Rasputin'}, {'age': 23, 'name': 'Barbara-Ann'}, {'age': 18, 'name': 'Jonny B Goode'}]
-
->>> first([0,1,2])
-0
-
->>> first('')
-''
-
->>> first('test')
-'t'
-
->>> join([0,1,2], 'glue')
-'0glue1glue2'
-
->>> length('1234')
-4
-
->>> length([1,2,3,4])
-4
-
->>> length_is([], 0)
-True
-
->>> length_is([], 1)
-False
-
->>> length_is('a', 1)
-True
-
->>> length_is('a', 10)
-False
-
->>> slice_('abcdefg', '0')
-''
-
->>> slice_('abcdefg', '1')
-'a'
-
->>> slice_('abcdefg', '-1')
-'abcdef'
-
->>> slice_('abcdefg', '1:2')
-'b'
-
->>> slice_('abcdefg', '1:3')
-'bc'
-
->>> slice_('abcdefg', '0::2')
-'aceg'
-
->>> unordered_list(['item 1', []])
-'\t<li>item 1</li>'
-
->>> unordered_list(['item 1', [['item 1.1', []]]])
-'\t<li>item 1\n\t<ul>\n\t\t<li>item 1.1</li>\n\t</ul>\n\t</li>'
-
->>> unordered_list(['item 1', [['item 1.1', []], ['item 1.2', []]]])
-'\t<li>item 1\n\t<ul>\n\t\t<li>item 1.1</li>\n\t\t<li>item 1.2</li>\n\t</ul>\n\t</li>'
-
->>> add('1', '2')
-3
-
->>> get_digit(123, 1)
-3
-
->>> get_digit(123, 2)
-2
-
->>> get_digit(123, 3)
-1
-
->>> get_digit(123, 4)
-0
-
->>> get_digit(123, 0)
-123
-
->>> get_digit('xyz', 0)
-'xyz'
-
-# real testing of date() is in dateformat.py
->>> date(datetime.datetime(2005, 12, 29), "d F Y")
-'29 December 2005'
->>> date(datetime.datetime(2005, 12, 29), r'jS o\f F')
-'29th of December'
-
-# real testing of time() is done in dateformat.py
->>> time(datetime.time(13), "h")
-'01'
-
->>> time(datetime.time(0), "h")
-'12'
-
-# real testing is done in timesince.py, where we can provide our own 'now'
->>> timesince(datetime.datetime.now() - datetime.timedelta(1))
-'1 day'
-
->>> default("val", "default")
-'val'
-
->>> default(None, "default")
-'default'
-
->>> default('', "default")
-'default'
-
->>> default_if_none("val", "default")
-'val'
-
->>> default_if_none(None, "default")
-'default'
-
->>> default_if_none('', "default")
-''
-
->>> divisibleby(4, 2)
-True
-
->>> divisibleby(4, 3)
-False
-
->>> yesno(True)
-'yes'
-
->>> yesno(False)
-'no'
-
->>> yesno(None)
-'maybe'
-
->>> yesno(True, 'certainly,get out of town,perhaps')
-'certainly'
-
->>> yesno(False, 'certainly,get out of town,perhaps')
-'get out of town'
-
->>> yesno(None, 'certainly,get out of town,perhaps')
-'perhaps'
-
->>> yesno(None, 'certainly,get out of town')
-'get out of town'
-
->>> filesizeformat(1023)
-'1023 bytes'
-
->>> filesizeformat(1024)
-'1.0 KB'
-
->>> filesizeformat(10*1024)
-'10.0 KB'
-
->>> filesizeformat(1024*1024-1)
-'1024.0 KB'
-
->>> filesizeformat(1024*1024)
-'1.0 MB'
-
->>> filesizeformat(1024*1024*50)
-'50.0 MB'
-
->>> filesizeformat(1024*1024*1024-1)
-'1024.0 MB'
-
->>> filesizeformat(1024*1024*1024)
-'1.0 GB'
-
->>> pluralize(1)
-''
-
->>> pluralize(0)
-'s'
-
->>> pluralize(2)
-'s'
-
->>> pluralize([1])
-''
-
->>> pluralize([])
-'s'
-
->>> pluralize([1,2,3])
-'s'
-
->>> pluralize(1,'es')
-''
-
->>> pluralize(0,'es')
-'es'
-
->>> pluralize(2,'es')
-'es'
-
->>> pluralize(1,'y,ies')
-'y'
-
->>> pluralize(0,'y,ies')
-'ies'
-
->>> pluralize(2,'y,ies')
-'ies'
-
->>> pluralize(0,'y,ies,error')
-''
-
->>> phone2numeric('0800 flowers')
-'0800 3569377'
-
-# Filters shouldn't break if passed non-strings
->>> addslashes(123)
-'123'
->>> linenumbers(123)
-'1. 123'
->>> lower(123)
-'123'
->>> make_list(123)
-['1', '2', '3']
->>> slugify(123)
-'123'
->>> title(123)
-'123'
->>> truncatewords(123, 2)
-'123'
->>> upper(123)
-'123'
->>> urlencode(123)
-'123'
->>> urlize(123)
-'123'
->>> urlizetrunc(123, 1)
-'123'
->>> wordcount(123)
-1
->>> wordwrap(123, 2)
-'123'
->>> ljust('123', 4)
-'123 '
->>> rjust('123', 4)
-' 123'
->>> center('123', 5)
-' 123 '
->>> center('123', 6)
-' 123  '
->>> cut(123, '2')
-'13'
->>> escape(123)
-'123'
->>> linebreaks(123)
-'<p>123</p>'
->>> linebreaksbr(123)
-'123'
->>> removetags(123, 'a')
-'123'
->>> striptags(123)
-'123'
-
-"""
-
-from django.template.defaultfilters import *
-import datetime
-
-if __name__ == '__main__':
-    import doctest
-    doctest.testmod()
--- a/thirdparty/google_appengine/lib/django/tests/regressiontests/dispatch/__init__.py	Mon Sep 07 20:26:39 2009 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,2 +0,0 @@
-"""Unit-tests for the dispatch project
-"""
--- a/thirdparty/google_appengine/lib/django/tests/regressiontests/dispatch/tests/__init__.py	Mon Sep 07 20:26:39 2009 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,7 +0,0 @@
-"""
-Unit-tests for the dispatch project
-"""
-
-from test_dispatcher import *
-from test_robustapply import *
-from test_saferef import *
--- a/thirdparty/google_appengine/lib/django/tests/regressiontests/dispatch/tests/test_dispatcher.py	Mon Sep 07 20:26:39 2009 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,144 +0,0 @@
-from django.dispatch.dispatcher import *
-from django.dispatch import dispatcher, robust
-import unittest
-import copy
-
-def x(a):
-    return a
-
-class Dummy(object):
-    pass
-
-class Callable(object):
-    def __call__(self, a):
-        return a
-    
-    def a(self, a):
-        return a
-
-class DispatcherTests(unittest.TestCase):
-    """Test suite for dispatcher (barely started)"""
-    
-    def setUp(self):
-        # track the initial state, since it's possible that others have bleed receivers in
-        self.sendersBack = copy.copy(dispatcher.sendersBack)
-        self.connections = copy.copy(dispatcher.connections)
-        self.senders = copy.copy(dispatcher.senders)
-    
-    def _testIsClean(self):
-        """Assert that everything has been cleaned up automatically"""
-        self.assertEqual(dispatcher.sendersBack, self.sendersBack)
-        self.assertEqual(dispatcher.connections, self.connections)
-        self.assertEqual(dispatcher.senders, self.senders)
-    
-    def testExact(self):
-        a = Dummy()
-        signal = 'this'
-        connect(x, signal, a)
-        expected = [(x,a)]
-        result = send('this',a, a=a)
-        self.assertEqual(result, expected)
-        disconnect(x, signal, a)
-        self.assertEqual(list(getAllReceivers(a,signal)), [])
-        self._testIsClean()
-    
-    def testAnonymousSend(self):
-        a = Dummy()
-        signal = 'this'
-        connect(x, signal)
-        expected = [(x,a)]
-        result = send(signal,None, a=a)
-        self.assertEqual(result, expected)
-        disconnect(x, signal)
-        self.assertEqual(list(getAllReceivers(None,signal)), [])
-        self._testIsClean()
-    
-    def testAnyRegistration(self):
-        a = Dummy()
-        signal = 'this'
-        connect(x, signal, Any)
-        expected = [(x,a)]
-        result = send('this',object(), a=a)
-        self.assertEqual(result, expected)
-        disconnect(x, signal, Any)
-        expected = []
-        result = send('this',object(), a=a)
-        self.assertEqual(result, expected)
-        self.assertEqual(list(getAllReceivers(Any,signal)), [])
-        
-        self._testIsClean()
-    
-    def testAnyRegistration2(self):
-        a = Dummy()
-        signal = 'this'
-        connect(x, Any, a)
-        expected = [(x,a)]
-        result = send('this',a, a=a)
-        self.assertEqual(result, expected)
-        disconnect(x, Any, a)
-        self.assertEqual(list(getAllReceivers(a,Any)), [])
-        self._testIsClean()
-    
-    def testGarbageCollected(self):
-        a = Callable()
-        b = Dummy()
-        signal = 'this'
-        connect(a.a, signal, b)
-        expected = []
-        del a
-        result = send('this',b, a=b)
-        self.assertEqual(result, expected)
-        self.assertEqual(list(getAllReceivers(b,signal)), [])
-        self._testIsClean()
-    
-    def testGarbageCollectedObj(self):
-        class x:
-            def __call__(self, a):
-                return a
-        a = Callable()
-        b = Dummy()
-        signal = 'this'
-        connect(a, signal, b)
-        expected = []
-        del a
-        result = send('this',b, a=b)
-        self.assertEqual(result, expected)
-        self.assertEqual(list(getAllReceivers(b,signal)), [])
-        self._testIsClean()
-
-    
-    def testMultipleRegistration(self):
-        a = Callable()
-        b = Dummy()
-        signal = 'this'
-        connect(a, signal, b)
-        connect(a, signal, b)
-        connect(a, signal, b)
-        connect(a, signal, b)
-        connect(a, signal, b)
-        connect(a, signal, b)
-        result = send('this',b, a=b)
-        self.assertEqual(len(result), 1)
-        self.assertEqual(len(list(getAllReceivers(b,signal))), 1)
-        del a
-        del b
-        del result
-        self._testIsClean()
-    
-    def testRobust(self):
-        """Test the sendRobust function"""
-        def fails():
-            raise ValueError('this')
-        a = object()
-        signal = 'this'
-        connect(fails, Any, a)
-        result = robust.sendRobust('this',a, a=a)
-        err = result[0][1]
-        self.assert_(isinstance(err, ValueError))
-        self.assertEqual(err.args, ('this',))
-
-def getSuite():
-    return unittest.makeSuite(DispatcherTests,'test')
-
-if __name__ == "__main__":
-    unittest.main ()
--- a/thirdparty/google_appengine/lib/django/tests/regressiontests/dispatch/tests/test_robustapply.py	Mon Sep 07 20:26:39 2009 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,34 +0,0 @@
-from django.dispatch.robustapply import *
-
-import unittest
-
-def noArgument():
-    pass
-
-def oneArgument(blah):
-    pass
-
-def twoArgument(blah, other):
-    pass
-
-class TestCases(unittest.TestCase):
-    def test01(self):
-        robustApply(noArgument)
-    
-    def test02(self):
-        self.assertRaises(TypeError, robustApply, noArgument, "this")
-    
-    def test03(self):
-        self.assertRaises(TypeError, robustApply, oneArgument)
-    
-    def test04(self):
-        """Raise error on duplication of a particular argument"""
-        self.assertRaises(TypeError, robustApply, oneArgument, "this", blah = "that")
-
-def getSuite():
-    return unittest.makeSuite(TestCases,'test')
-
-
-if __name__ == "__main__":
-    unittest.main()
-    
--- a/thirdparty/google_appengine/lib/django/tests/regressiontests/dispatch/tests/test_saferef.py	Mon Sep 07 20:26:39 2009 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,77 +0,0 @@
-from django.dispatch.saferef import *
-
-import unittest
-
-class Test1(object):
-    def x(self):
-        pass
-
-def test2(obj):
-    pass
-
-class Test2(object):
-    def __call__(self, obj):
-        pass
-
-class Tester(unittest.TestCase):
-    def setUp(self):
-        ts = []
-        ss = []
-        for x in xrange(5000):
-            t = Test1()
-            ts.append(t)
-            s = safeRef(t.x, self._closure)
-            ss.append(s)
-        ts.append(test2)
-        ss.append(safeRef(test2, self._closure))
-        for x in xrange(30):
-            t = Test2()
-            ts.append(t)
-            s = safeRef(t, self._closure)
-            ss.append(s)
-        self.ts = ts
-        self.ss = ss
-        self.closureCount = 0
-    
-    def tearDown(self):
-        del self.ts
-        del self.ss
-    
-    def testIn(self):
-        """Test the "in" operator for safe references (cmp)"""
-        for t in self.ts[:50]:
-            self.assert_(safeRef(t.x) in self.ss)
-    
-    def testValid(self):
-        """Test that the references are valid (return instance methods)"""
-        for s in self.ss:
-            self.assert_(s())
-    
-    def testShortCircuit (self):
-        """Test that creation short-circuits to reuse existing references"""
-        sd = {}
-        for s in self.ss:
-            sd[s] = 1
-        for t in self.ts:
-            if hasattr(t, 'x'):
-                self.assert_(sd.has_key(safeRef(t.x)))
-            else:
-                self.assert_(sd.has_key(safeRef(t)))
-    
-    def testRepresentation (self):
-        """Test that the reference object's representation works
-        
-        XXX Doesn't currently check the results, just that no error
-            is raised
-        """
-        repr(self.ss[-1])
-        
-    def _closure(self, ref):
-        """Dumb utility mechanism to increment deletion counter"""
-        self.closureCount +=1
-
-def getSuite():
-    return unittest.makeSuite(Tester,'test')
-
-if __name__ == "__main__":
-    unittest.main()
--- a/thirdparty/google_appengine/lib/django/tests/regressiontests/forms/tests.py	Mon Sep 07 20:26:39 2009 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,3585 +0,0 @@
-# -*- coding: utf-8 -*-
-r"""
->>> from django.newforms import *
->>> import datetime
->>> import re
-
-###########
-# Widgets #
-###########
-
-Each Widget class corresponds to an HTML form widget. A Widget knows how to
-render itself, given a field name and some data. Widgets don't perform
-validation.
-
-# TextInput Widget ############################################################
-
->>> w = TextInput()
->>> w.render('email', '')
-u'<input type="text" name="email" />'
->>> w.render('email', None)
-u'<input type="text" name="email" />'
->>> w.render('email', 'test@example.com')
-u'<input type="text" name="email" value="test@example.com" />'
->>> w.render('email', 'some "quoted" & ampersanded value')
-u'<input type="text" name="email" value="some &quot;quoted&quot; &amp; ampersanded value" />'
->>> w.render('email', 'test@example.com', attrs={'class': 'fun'})
-u'<input type="text" name="email" value="test@example.com" class="fun" />'
-
-# Note that doctest in Python 2.4 (and maybe 2.5?) doesn't support non-ascii
-# characters in output, so we're displaying the repr() here.
->>> w.render('email', 'ŠĐĆŽćžšđ', attrs={'class': 'fun'})
-u'<input type="text" name="email" value="\u0160\u0110\u0106\u017d\u0107\u017e\u0161\u0111" class="fun" />'
-
-You can also pass 'attrs' to the constructor:
->>> w = TextInput(attrs={'class': 'fun'})
->>> w.render('email', '')
-u'<input type="text" class="fun" name="email" />'
->>> w.render('email', 'foo@example.com')
-u'<input type="text" class="fun" value="foo@example.com" name="email" />'
-
-'attrs' passed to render() get precedence over those passed to the constructor:
->>> w = TextInput(attrs={'class': 'pretty'})
->>> w.render('email', '', attrs={'class': 'special'})
-u'<input type="text" class="special" name="email" />'
-
-# PasswordInput Widget ############################################################
-
->>> w = PasswordInput()
->>> w.render('email', '')
-u'<input type="password" name="email" />'
->>> w.render('email', None)
-u'<input type="password" name="email" />'
->>> w.render('email', 'test@example.com')
-u'<input type="password" name="email" value="test@example.com" />'
->>> w.render('email', 'some "quoted" & ampersanded value')
-u'<input type="password" name="email" value="some &quot;quoted&quot; &amp; ampersanded value" />'
->>> w.render('email', 'test@example.com', attrs={'class': 'fun'})
-u'<input type="password" name="email" value="test@example.com" class="fun" />'
-
-You can also pass 'attrs' to the constructor:
->>> w = PasswordInput(attrs={'class': 'fun'})
->>> w.render('email', '')
-u'<input type="password" class="fun" name="email" />'
->>> w.render('email', 'foo@example.com')
-u'<input type="password" class="fun" value="foo@example.com" name="email" />'
-
-'attrs' passed to render() get precedence over those passed to the constructor:
->>> w = PasswordInput(attrs={'class': 'pretty'})
->>> w.render('email', '', attrs={'class': 'special'})
-u'<input type="password" class="special" name="email" />'
-
->>> w.render('email', 'ŠĐĆŽćžšđ', attrs={'class': 'fun'})
-u'<input type="password" class="fun" value="\u0160\u0110\u0106\u017d\u0107\u017e\u0161\u0111" name="email" />'
-
-The render_value argument lets you specify whether the widget should render
-its value. You may want to do this for security reasons.
->>> w = PasswordInput(render_value=True)
->>> w.render('email', 'secret')
-u'<input type="password" name="email" value="secret" />'
->>> w = PasswordInput(render_value=False)
->>> w.render('email', '')
-u'<input type="password" name="email" />'
->>> w.render('email', None)
-u'<input type="password" name="email" />'
->>> w.render('email', 'secret')
-u'<input type="password" name="email" />'
->>> w = PasswordInput(attrs={'class': 'fun'}, render_value=False)
->>> w.render('email', 'secret')
-u'<input type="password" class="fun" name="email" />'
-
-# HiddenInput Widget ############################################################
-
->>> w = HiddenInput()
->>> w.render('email', '')
-u'<input type="hidden" name="email" />'
->>> w.render('email', None)
-u'<input type="hidden" name="email" />'
->>> w.render('email', 'test@example.com')
-u'<input type="hidden" name="email" value="test@example.com" />'
->>> w.render('email', 'some "quoted" & ampersanded value')
-u'<input type="hidden" name="email" value="some &quot;quoted&quot; &amp; ampersanded value" />'
->>> w.render('email', 'test@example.com', attrs={'class': 'fun'})
-u'<input type="hidden" name="email" value="test@example.com" class="fun" />'
-
-You can also pass 'attrs' to the constructor:
->>> w = HiddenInput(attrs={'class': 'fun'})
->>> w.render('email', '')
-u'<input type="hidden" class="fun" name="email" />'
->>> w.render('email', 'foo@example.com')
-u'<input type="hidden" class="fun" value="foo@example.com" name="email" />'
-
-'attrs' passed to render() get precedence over those passed to the constructor:
->>> w = HiddenInput(attrs={'class': 'pretty'})
->>> w.render('email', '', attrs={'class': 'special'})
-u'<input type="hidden" class="special" name="email" />'
-
->>> w.render('email', 'ŠĐĆŽćžšđ', attrs={'class': 'fun'})
-u'<input type="hidden" class="fun" value="\u0160\u0110\u0106\u017d\u0107\u017e\u0161\u0111" name="email" />'
-
-'attrs' passed to render() get precedence over those passed to the constructor:
->>> w = HiddenInput(attrs={'class': 'pretty'})
->>> w.render('email', '', attrs={'class': 'special'})
-u'<input type="hidden" class="special" name="email" />'
-
-# MultipleHiddenInput Widget ##################################################
-
->>> w = MultipleHiddenInput()
->>> w.render('email', [])
-u''
->>> w.render('email', None)
-u''
->>> w.render('email', ['test@example.com'])
-u'<input type="hidden" name="email" value="test@example.com" />'
->>> w.render('email', ['some "quoted" & ampersanded value'])
-u'<input type="hidden" name="email" value="some &quot;quoted&quot; &amp; ampersanded value" />'
->>> w.render('email', ['test@example.com', 'foo@example.com'])
-u'<input type="hidden" name="email" value="test@example.com" />\n<input type="hidden" name="email" value="foo@example.com" />'
->>> w.render('email', ['test@example.com'], attrs={'class': 'fun'})
-u'<input type="hidden" name="email" value="test@example.com" class="fun" />'
->>> w.render('email', ['test@example.com', 'foo@example.com'], attrs={'class': 'fun'})
-u'<input type="hidden" name="email" value="test@example.com" class="fun" />\n<input type="hidden" name="email" value="foo@example.com" class="fun" />'
-
-You can also pass 'attrs' to the constructor:
->>> w = MultipleHiddenInput(attrs={'class': 'fun'})
->>> w.render('email', [])
-u''
->>> w.render('email', ['foo@example.com'])
-u'<input type="hidden" class="fun" value="foo@example.com" name="email" />'
->>> w.render('email', ['foo@example.com', 'test@example.com'])
-u'<input type="hidden" class="fun" value="foo@example.com" name="email" />\n<input type="hidden" class="fun" value="test@example.com" name="email" />'
-
-'attrs' passed to render() get precedence over those passed to the constructor:
->>> w = MultipleHiddenInput(attrs={'class': 'pretty'})
->>> w.render('email', ['foo@example.com'], attrs={'class': 'special'})
-u'<input type="hidden" class="special" value="foo@example.com" name="email" />'
-
->>> w.render('email', ['ŠĐĆŽćžšđ'], attrs={'class': 'fun'})
-u'<input type="hidden" class="fun" value="\u0160\u0110\u0106\u017d\u0107\u017e\u0161\u0111" name="email" />'
-
-'attrs' passed to render() get precedence over those passed to the constructor:
->>> w = MultipleHiddenInput(attrs={'class': 'pretty'})
->>> w.render('email', ['foo@example.com'], attrs={'class': 'special'})
-u'<input type="hidden" class="special" value="foo@example.com" name="email" />'
-
-# FileInput Widget ############################################################
-
->>> w = FileInput()
->>> w.render('email', '')
-u'<input type="file" name="email" />'
->>> w.render('email', None)
-u'<input type="file" name="email" />'
->>> w.render('email', 'test@example.com')
-u'<input type="file" name="email" value="test@example.com" />'
->>> w.render('email', 'some "quoted" & ampersanded value')
-u'<input type="file" name="email" value="some &quot;quoted&quot; &amp; ampersanded value" />'
->>> w.render('email', 'test@example.com', attrs={'class': 'fun'})
-u'<input type="file" name="email" value="test@example.com" class="fun" />'
-
-You can also pass 'attrs' to the constructor:
->>> w = FileInput(attrs={'class': 'fun'})
->>> w.render('email', '')
-u'<input type="file" class="fun" name="email" />'
->>> w.render('email', 'foo@example.com')
-u'<input type="file" class="fun" value="foo@example.com" name="email" />'
-
->>> w.render('email', 'ŠĐĆŽćžšđ', attrs={'class': 'fun'})
-u'<input type="file" class="fun" value="\u0160\u0110\u0106\u017d\u0107\u017e\u0161\u0111" name="email" />'
-
-# Textarea Widget #############################################################
-
->>> w = Textarea()
->>> w.render('msg', '')
-u'<textarea name="msg"></textarea>'
->>> w.render('msg', None)
-u'<textarea name="msg"></textarea>'
->>> w.render('msg', 'value')
-u'<textarea name="msg">value</textarea>'
->>> w.render('msg', 'some "quoted" & ampersanded value')
-u'<textarea name="msg">some &quot;quoted&quot; &amp; ampersanded value</textarea>'
->>> w.render('msg', 'value', attrs={'class': 'pretty'})
-u'<textarea name="msg" class="pretty">value</textarea>'
-
-You can also pass 'attrs' to the constructor:
->>> w = Textarea(attrs={'class': 'pretty'})
->>> w.render('msg', '')
-u'<textarea class="pretty" name="msg"></textarea>'
->>> w.render('msg', 'example')
-u'<textarea class="pretty" name="msg">example</textarea>'
-
-'attrs' passed to render() get precedence over those passed to the constructor:
->>> w = Textarea(attrs={'class': 'pretty'})
->>> w.render('msg', '', attrs={'class': 'special'})
-u'<textarea class="special" name="msg"></textarea>'
-
->>> w.render('msg', 'ŠĐĆŽćžšđ', attrs={'class': 'fun'})
-u'<textarea class="fun" name="msg">\u0160\u0110\u0106\u017d\u0107\u017e\u0161\u0111</textarea>'
-
-# CheckboxInput Widget ########################################################
-
->>> w = CheckboxInput()
->>> w.render('is_cool', '')
-u'<input type="checkbox" name="is_cool" />'
->>> w.render('is_cool', None)
-u'<input type="checkbox" name="is_cool" />'
->>> w.render('is_cool', False)
-u'<input type="checkbox" name="is_cool" />'
->>> w.render('is_cool', True)
-u'<input checked="checked" type="checkbox" name="is_cool" />'
-
-Using any value that's not in ('', None, False, True) will check the checkbox
-and set the 'value' attribute.
->>> w.render('is_cool', 'foo')
-u'<input checked="checked" type="checkbox" name="is_cool" value="foo" />'
-
->>> w.render('is_cool', False, attrs={'class': 'pretty'})
-u'<input type="checkbox" name="is_cool" class="pretty" />'
-
-You can also pass 'attrs' to the constructor:
->>> w = CheckboxInput(attrs={'class': 'pretty'})
->>> w.render('is_cool', '')
-u'<input type="checkbox" class="pretty" name="is_cool" />'
-
-'attrs' passed to render() get precedence over those passed to the constructor:
->>> w = CheckboxInput(attrs={'class': 'pretty'})
->>> w.render('is_cool', '', attrs={'class': 'special'})
-u'<input type="checkbox" class="special" name="is_cool" />'
-
-You can pass 'check_test' to the constructor. This is a callable that takes the
-value and returns True if the box should be checked.
->>> w = CheckboxInput(check_test=lambda value: value.startswith('hello'))
->>> w.render('greeting', '')
-u'<input type="checkbox" name="greeting" />'
->>> w.render('greeting', 'hello')
-u'<input checked="checked" type="checkbox" name="greeting" value="hello" />'
->>> w.render('greeting', 'hello there')
-u'<input checked="checked" type="checkbox" name="greeting" value="hello there" />'
->>> w.render('greeting', 'hello & goodbye')
-u'<input checked="checked" type="checkbox" name="greeting" value="hello &amp; goodbye" />'
-
-A subtlety: If the 'check_test' argument cannot handle a value and raises any
-exception during its __call__, then the exception will be swallowed and the box
-will not be checked. In this example, the 'check_test' assumes the value has a
-startswith() method, which fails for the values True, False and None.
->>> w.render('greeting', True)
-u'<input type="checkbox" name="greeting" />'
->>> w.render('greeting', False)
-u'<input type="checkbox" name="greeting" />'
->>> w.render('greeting', None)
-u'<input type="checkbox" name="greeting" />'
-
-# Select Widget ###############################################################
-
->>> w = Select()
->>> print w.render('beatle', 'J', choices=(('J', 'John'), ('P', 'Paul'), ('G', 'George'), ('R', 'Ringo')))
-<select name="beatle">
-<option value="J" selected="selected">John</option>
-<option value="P">Paul</option>
-<option value="G">George</option>
-<option value="R">Ringo</option>
-</select>
-
-If the value is None, none of the options are selected:
->>> print w.render('beatle', None, choices=(('J', 'John'), ('P', 'Paul'), ('G', 'George'), ('R', 'Ringo')))
-<select name="beatle">
-<option value="J">John</option>
-<option value="P">Paul</option>
-<option value="G">George</option>
-<option value="R">Ringo</option>
-</select>
-
-If the value corresponds to a label (but not to an option value), none of the options are selected:
->>> print w.render('beatle', 'John', choices=(('J', 'John'), ('P', 'Paul'), ('G', 'George'), ('R', 'Ringo')))
-<select name="beatle">
-<option value="J">John</option>
-<option value="P">Paul</option>
-<option value="G">George</option>
-<option value="R">Ringo</option>
-</select>
-
-The value is compared to its str():
->>> print w.render('num', 2, choices=[('1', '1'), ('2', '2'), ('3', '3')])
-<select name="num">
-<option value="1">1</option>
-<option value="2" selected="selected">2</option>
-<option value="3">3</option>
-</select>
->>> print w.render('num', '2', choices=[(1, 1), (2, 2), (3, 3)])
-<select name="num">
-<option value="1">1</option>
-<option value="2" selected="selected">2</option>
-<option value="3">3</option>
-</select>
->>> print w.render('num', 2, choices=[(1, 1), (2, 2), (3, 3)])
-<select name="num">
-<option value="1">1</option>
-<option value="2" selected="selected">2</option>
-<option value="3">3</option>
-</select>
-
-The 'choices' argument can be any iterable:
->>> from itertools import chain
->>> def get_choices():
-...     for i in range(5):
-...         yield (i, i)
->>> print w.render('num', 2, choices=get_choices())
-<select name="num">
-<option value="0">0</option>
-<option value="1">1</option>
-<option value="2" selected="selected">2</option>
-<option value="3">3</option>
-<option value="4">4</option>
-</select>
->>> things = ({'id': 1, 'name': 'And Boom'}, {'id': 2, 'name': 'One More Thing!'})
->>> class SomeForm(Form):
-...     somechoice = ChoiceField(choices=chain((('', '-'*9),), [(thing['id'], thing['name']) for thing in things]))
->>> f = SomeForm()
->>> f.as_table()
-u'<tr><th><label for="id_somechoice">Somechoice:</label></th><td><select name="somechoice" id="id_somechoice">\n<option value="" selected="selected">---------</option>\n<option value="1">And Boom</option>\n<option value="2">One More Thing!</option>\n</select></td></tr>'
->>> f.as_table()
-u'<tr><th><label for="id_somechoice">Somechoice:</label></th><td><select name="somechoice" id="id_somechoice">\n<option value="" selected="selected">---------</option>\n<option value="1">And Boom</option>\n<option value="2">One More Thing!</option>\n</select></td></tr>'
->>> f = SomeForm({'somechoice': 2})
->>> f.as_table()
-u'<tr><th><label for="id_somechoice">Somechoice:</label></th><td><select name="somechoice" id="id_somechoice">\n<option value="">---------</option>\n<option value="1">And Boom</option>\n<option value="2" selected="selected">One More Thing!</option>\n</select></td></tr>'
-
-You can also pass 'choices' to the constructor:
->>> w = Select(choices=[(1, 1), (2, 2), (3, 3)])
->>> print w.render('num', 2)
-<select name="num">
-<option value="1">1</option>
-<option value="2" selected="selected">2</option>
-<option value="3">3</option>
-</select>
-
-If 'choices' is passed to both the constructor and render(), then they'll both be in the output:
->>> print w.render('num', 2, choices=[(4, 4), (5, 5)])
-<select name="num">
-<option value="1">1</option>
-<option value="2" selected="selected">2</option>
-<option value="3">3</option>
-<option value="4">4</option>
-<option value="5">5</option>
-</select>
-
->>> w.render('email', 'ŠĐĆŽćžšđ', choices=[('ŠĐĆŽćžšđ', 'ŠĐabcĆŽćžšđ'), ('ćžšđ', 'abcćžšđ')])
-u'<select name="email">\n<option value="1">1</option>\n<option value="2">2</option>\n<option value="3">3</option>\n<option value="\u0160\u0110\u0106\u017d\u0107\u017e\u0161\u0111" selected="selected">\u0160\u0110abc\u0106\u017d\u0107\u017e\u0161\u0111</option>\n<option value="\u0107\u017e\u0161\u0111">abc\u0107\u017e\u0161\u0111</option>\n</select>'
-
-If choices is passed to the constructor and is a generator, it can be iterated
-over multiple times without getting consumed:
->>> w = Select(choices=get_choices())
->>> print w.render('num', 2)
-<select name="num">
-<option value="0">0</option>
-<option value="1">1</option>
-<option value="2" selected="selected">2</option>
-<option value="3">3</option>
-<option value="4">4</option>
-</select>
->>> print w.render('num', 3)
-<select name="num">
-<option value="0">0</option>
-<option value="1">1</option>
-<option value="2">2</option>
-<option value="3" selected="selected">3</option>
-<option value="4">4</option>
-</select>
-
-# NullBooleanSelect Widget ####################################################
-
->>> w = NullBooleanSelect()
->>> print w.render('is_cool', True)
-<select name="is_cool">
-<option value="1">Unknown</option>
-<option value="2" selected="selected">Yes</option>
-<option value="3">No</option>
-</select>
->>> print w.render('is_cool', False)
-<select name="is_cool">
-<option value="1">Unknown</option>
-<option value="2">Yes</option>
-<option value="3" selected="selected">No</option>
-</select>
->>> print w.render('is_cool', None)
-<select name="is_cool">
-<option value="1" selected="selected">Unknown</option>
-<option value="2">Yes</option>
-<option value="3">No</option>
-</select>
->>> print w.render('is_cool', '2')
-<select name="is_cool">
-<option value="1">Unknown</option>
-<option value="2" selected="selected">Yes</option>
-<option value="3">No</option>
-</select>
->>> print w.render('is_cool', '3')
-<select name="is_cool">
-<option value="1">Unknown</option>
-<option value="2">Yes</option>
-<option value="3" selected="selected">No</option>
-</select>
-
-# SelectMultiple Widget #######################################################
-
->>> w = SelectMultiple()
->>> print w.render('beatles', ['J'], choices=(('J', 'John'), ('P', 'Paul'), ('G', 'George'), ('R', 'Ringo')))
-<select multiple="multiple" name="beatles">
-<option value="J" selected="selected">John</option>
-<option value="P">Paul</option>
-<option value="G">George</option>
-<option value="R">Ringo</option>
-</select>
->>> print w.render('beatles', ['J', 'P'], choices=(('J', 'John'), ('P', 'Paul'), ('G', 'George'), ('R', 'Ringo')))
-<select multiple="multiple" name="beatles">
-<option value="J" selected="selected">John</option>
-<option value="P" selected="selected">Paul</option>
-<option value="G">George</option>
-<option value="R">Ringo</option>
-</select>
->>> print w.render('beatles', ['J', 'P', 'R'], choices=(('J', 'John'), ('P', 'Paul'), ('G', 'George'), ('R', 'Ringo')))
-<select multiple="multiple" name="beatles">
-<option value="J" selected="selected">John</option>
-<option value="P" selected="selected">Paul</option>
-<option value="G">George</option>
-<option value="R" selected="selected">Ringo</option>
-</select>
-
-If the value is None, none of the options are selected:
->>> print w.render('beatles', None, choices=(('J', 'John'), ('P', 'Paul'), ('G', 'George'), ('R', 'Ringo')))
-<select multiple="multiple" name="beatles">
-<option value="J">John</option>
-<option value="P">Paul</option>
-<option value="G">George</option>
-<option value="R">Ringo</option>
-</select>
-
-If the value corresponds to a label (but not to an option value), none of the options are selected:
->>> print w.render('beatles', ['John'], choices=(('J', 'John'), ('P', 'Paul'), ('G', 'George'), ('R', 'Ringo')))
-<select multiple="multiple" name="beatles">
-<option value="J">John</option>
-<option value="P">Paul</option>
-<option value="G">George</option>
-<option value="R">Ringo</option>
-</select>
-
-If multiple values are given, but some of them are not valid, the valid ones are selected:
->>> print w.render('beatles', ['J', 'G', 'foo'], choices=(('J', 'John'), ('P', 'Paul'), ('G', 'George'), ('R', 'Ringo')))
-<select multiple="multiple" name="beatles">
-<option value="J" selected="selected">John</option>
-<option value="P">Paul</option>
-<option value="G" selected="selected">George</option>
-<option value="R">Ringo</option>
-</select>
-
-The value is compared to its str():
->>> print w.render('nums', [2], choices=[('1', '1'), ('2', '2'), ('3', '3')])
-<select multiple="multiple" name="nums">
-<option value="1">1</option>
-<option value="2" selected="selected">2</option>
-<option value="3">3</option>
-</select>
->>> print w.render('nums', ['2'], choices=[(1, 1), (2, 2), (3, 3)])
-<select multiple="multiple" name="nums">
-<option value="1">1</option>
-<option value="2" selected="selected">2</option>
-<option value="3">3</option>
-</select>
->>> print w.render('nums', [2], choices=[(1, 1), (2, 2), (3, 3)])
-<select multiple="multiple" name="nums">
-<option value="1">1</option>
-<option value="2" selected="selected">2</option>
-<option value="3">3</option>
-</select>
-
-The 'choices' argument can be any iterable:
->>> def get_choices():
-...     for i in range(5):
-...         yield (i, i)
->>> print w.render('nums', [2], choices=get_choices())
-<select multiple="multiple" name="nums">
-<option value="0">0</option>
-<option value="1">1</option>
-<option value="2" selected="selected">2</option>
-<option value="3">3</option>
-<option value="4">4</option>
-</select>
-
-You can also pass 'choices' to the constructor:
->>> w = SelectMultiple(choices=[(1, 1), (2, 2), (3, 3)])
->>> print w.render('nums', [2])
-<select multiple="multiple" name="nums">
-<option value="1">1</option>
-<option value="2" selected="selected">2</option>
-<option value="3">3</option>
-</select>
-
-If 'choices' is passed to both the constructor and render(), then they'll both be in the output:
->>> print w.render('nums', [2], choices=[(4, 4), (5, 5)])
-<select multiple="multiple" name="nums">
-<option value="1">1</option>
-<option value="2" selected="selected">2</option>
-<option value="3">3</option>
-<option value="4">4</option>
-<option value="5">5</option>
-</select>
-
->>> w.render('nums', ['ŠĐĆŽćžšđ'], choices=[('ŠĐĆŽćžšđ', 'ŠĐabcĆŽćžšđ'), ('ćžšđ', 'abcćžšđ')])
-u'<select multiple="multiple" name="nums">\n<option value="1">1</option>\n<option value="2">2</option>\n<option value="3">3</option>\n<option value="\u0160\u0110\u0106\u017d\u0107\u017e\u0161\u0111" selected="selected">\u0160\u0110abc\u0106\u017d\u0107\u017e\u0161\u0111</option>\n<option value="\u0107\u017e\u0161\u0111">abc\u0107\u017e\u0161\u0111</option>\n</select>'
-
-# RadioSelect Widget ##########################################################
-
->>> w = RadioSelect()
->>> print w.render('beatle', 'J', choices=(('J', 'John'), ('P', 'Paul'), ('G', 'George'), ('R', 'Ringo')))
-<ul>
-<li><label><input checked="checked" type="radio" name="beatle" value="J" /> John</label></li>
-<li><label><input type="radio" name="beatle" value="P" /> Paul</label></li>
-<li><label><input type="radio" name="beatle" value="G" /> George</label></li>
-<li><label><input type="radio" name="beatle" value="R" /> Ringo</label></li>
-</ul>
-
-If the value is None, none of the options are checked:
->>> print w.render('beatle', None, choices=(('J', 'John'), ('P', 'Paul'), ('G', 'George'), ('R', 'Ringo')))
-<ul>
-<li><label><input type="radio" name="beatle" value="J" /> John</label></li>
-<li><label><input type="radio" name="beatle" value="P" /> Paul</label></li>
-<li><label><input type="radio" name="beatle" value="G" /> George</label></li>
-<li><label><input type="radio" name="beatle" value="R" /> Ringo</label></li>
-</ul>
-
-If the value corresponds to a label (but not to an option value), none of the options are checked:
->>> print w.render('beatle', 'John', choices=(('J', 'John'), ('P', 'Paul'), ('G', 'George'), ('R', 'Ringo')))
-<ul>
-<li><label><input type="radio" name="beatle" value="J" /> John</label></li>
-<li><label><input type="radio" name="beatle" value="P" /> Paul</label></li>
-<li><label><input type="radio" name="beatle" value="G" /> George</label></li>
-<li><label><input type="radio" name="beatle" value="R" /> Ringo</label></li>
-</ul>
-
-The value is compared to its str():
->>> print w.render('num', 2, choices=[('1', '1'), ('2', '2'), ('3', '3')])
-<ul>
-<li><label><input type="radio" name="num" value="1" /> 1</label></li>
-<li><label><input checked="checked" type="radio" name="num" value="2" /> 2</label></li>
-<li><label><input type="radio" name="num" value="3" /> 3</label></li>
-</ul>
->>> print w.render('num', '2', choices=[(1, 1), (2, 2), (3, 3)])
-<ul>
-<li><label><input type="radio" name="num" value="1" /> 1</label></li>
-<li><label><input checked="checked" type="radio" name="num" value="2" /> 2</label></li>
-<li><label><input type="radio" name="num" value="3" /> 3</label></li>
-</ul>
->>> print w.render('num', 2, choices=[(1, 1), (2, 2), (3, 3)])
-<ul>
-<li><label><input type="radio" name="num" value="1" /> 1</label></li>
-<li><label><input checked="checked" type="radio" name="num" value="2" /> 2</label></li>
-<li><label><input type="radio" name="num" value="3" /> 3</label></li>
-</ul>
-
-The 'choices' argument can be any iterable:
->>> def get_choices():
-...     for i in range(5):
-...         yield (i, i)
->>> print w.render('num', 2, choices=get_choices())
-<ul>
-<li><label><input type="radio" name="num" value="0" /> 0</label></li>
-<li><label><input type="radio" name="num" value="1" /> 1</label></li>
-<li><label><input checked="checked" type="radio" name="num" value="2" /> 2</label></li>
-<li><label><input type="radio" name="num" value="3" /> 3</label></li>
-<li><label><input type="radio" name="num" value="4" /> 4</label></li>
-</ul>
-
-You can also pass 'choices' to the constructor:
->>> w = RadioSelect(choices=[(1, 1), (2, 2), (3, 3)])
->>> print w.render('num', 2)
-<ul>
-<li><label><input type="radio" name="num" value="1" /> 1</label></li>
-<li><label><input checked="checked" type="radio" name="num" value="2" /> 2</label></li>
-<li><label><input type="radio" name="num" value="3" /> 3</label></li>
-</ul>
-
-If 'choices' is passed to both the constructor and render(), then they'll both be in the output:
->>> print w.render('num', 2, choices=[(4, 4), (5, 5)])
-<ul>
-<li><label><input type="radio" name="num" value="1" /> 1</label></li>
-<li><label><input checked="checked" type="radio" name="num" value="2" /> 2</label></li>
-<li><label><input type="radio" name="num" value="3" /> 3</label></li>
-<li><label><input type="radio" name="num" value="4" /> 4</label></li>
-<li><label><input type="radio" name="num" value="5" /> 5</label></li>
-</ul>
-
-The render() method returns a RadioFieldRenderer object, whose str() is a <ul>.
-You can manipulate that object directly to customize the way the RadioSelect
-is rendered.
->>> w = RadioSelect()
->>> r = w.render('beatle', 'J', choices=(('J', 'John'), ('P', 'Paul'), ('G', 'George'), ('R', 'Ringo')))
->>> for inp in r:
-...     print inp
-<label><input checked="checked" type="radio" name="beatle" value="J" /> John</label>
-<label><input type="radio" name="beatle" value="P" /> Paul</label>
-<label><input type="radio" name="beatle" value="G" /> George</label>
-<label><input type="radio" name="beatle" value="R" /> Ringo</label>
->>> for inp in r:
-...     print '%s<br />' % inp
-<label><input checked="checked" type="radio" name="beatle" value="J" /> John</label><br />
-<label><input type="radio" name="beatle" value="P" /> Paul</label><br />
-<label><input type="radio" name="beatle" value="G" /> George</label><br />
-<label><input type="radio" name="beatle" value="R" /> Ringo</label><br />
->>> for inp in r:
-...     print '<p>%s %s</p>' % (inp.tag(), inp.choice_label)
-<p><input checked="checked" type="radio" name="beatle" value="J" /> John</p>
-<p><input type="radio" name="beatle" value="P" /> Paul</p>
-<p><input type="radio" name="beatle" value="G" /> George</p>
-<p><input type="radio" name="beatle" value="R" /> Ringo</p>
->>> for inp in r:
-...     print '%s %s %s %s %s' % (inp.name, inp.value, inp.choice_value, inp.choice_label, inp.is_checked())
-beatle J J John True
-beatle J P Paul False
-beatle J G George False
-beatle J R Ringo False
-
-A RadioFieldRenderer object also allows index access to individual RadioInput
-objects.
->>> w = RadioSelect()
->>> r = w.render('beatle', 'J', choices=(('J', 'John'), ('P', 'Paul'), ('G', 'George'), ('R', 'Ringo')))
->>> print r[1]
-<label><input type="radio" name="beatle" value="P" /> Paul</label>
->>> print r[0]
-<label><input checked="checked" type="radio" name="beatle" value="J" /> John</label>
->>> r[0].is_checked()
-True
->>> r[1].is_checked()
-False
->>> r[1].name, r[1].value, r[1].choice_value, r[1].choice_label
-('beatle', u'J', u'P', u'Paul')
->>> r[10]
-Traceback (most recent call last):
-...
-IndexError: list index out of range
-
->>> w = RadioSelect()
->>> unicode(w.render('email', 'ŠĐĆŽćžšđ', choices=[('ŠĐĆŽćžšđ', 'ŠĐabcĆŽćžšđ'), ('ćžšđ', 'abcćžšđ')]))
-u'<ul>\n<li><label><input checked="checked" type="radio" name="email" value="\u0160\u0110\u0106\u017d\u0107\u017e\u0161\u0111" /> \u0160\u0110abc\u0106\u017d\u0107\u017e\u0161\u0111</label></li>\n<li><label><input type="radio" name="email" value="\u0107\u017e\u0161\u0111" /> abc\u0107\u017e\u0161\u0111</label></li>\n</ul>'
-
-# CheckboxSelectMultiple Widget ###############################################
-
->>> w = CheckboxSelectMultiple()
->>> print w.render('beatles', ['J'], choices=(('J', 'John'), ('P', 'Paul'), ('G', 'George'), ('R', 'Ringo')))
-<ul>
-<li><label><input checked="checked" type="checkbox" name="beatles" value="J" /> John</label></li>
-<li><label><input type="checkbox" name="beatles" value="P" /> Paul</label></li>
-<li><label><input type="checkbox" name="beatles" value="G" /> George</label></li>
-<li><label><input type="checkbox" name="beatles" value="R" /> Ringo</label></li>
-</ul>
->>> print w.render('beatles', ['J', 'P'], choices=(('J', 'John'), ('P', 'Paul'), ('G', 'George'), ('R', 'Ringo')))
-<ul>
-<li><label><input checked="checked" type="checkbox" name="beatles" value="J" /> John</label></li>
-<li><label><input checked="checked" type="checkbox" name="beatles" value="P" /> Paul</label></li>
-<li><label><input type="checkbox" name="beatles" value="G" /> George</label></li>
-<li><label><input type="checkbox" name="beatles" value="R" /> Ringo</label></li>
-</ul>
->>> print w.render('beatles', ['J', 'P', 'R'], choices=(('J', 'John'), ('P', 'Paul'), ('G', 'George'), ('R', 'Ringo')))
-<ul>
-<li><label><input checked="checked" type="checkbox" name="beatles" value="J" /> John</label></li>
-<li><label><input checked="checked" type="checkbox" name="beatles" value="P" /> Paul</label></li>
-<li><label><input type="checkbox" name="beatles" value="G" /> George</label></li>
-<li><label><input checked="checked" type="checkbox" name="beatles" value="R" /> Ringo</label></li>
-</ul>
-
-If the value is None, none of the options are selected:
->>> print w.render('beatles', None, choices=(('J', 'John'), ('P', 'Paul'), ('G', 'George'), ('R', 'Ringo')))
-<ul>
-<li><label><input type="checkbox" name="beatles" value="J" /> John</label></li>
-<li><label><input type="checkbox" name="beatles" value="P" /> Paul</label></li>
-<li><label><input type="checkbox" name="beatles" value="G" /> George</label></li>
-<li><label><input type="checkbox" name="beatles" value="R" /> Ringo</label></li>
-</ul>
-
-If the value corresponds to a label (but not to an option value), none of the options are selected:
->>> print w.render('beatles', ['John'], choices=(('J', 'John'), ('P', 'Paul'), ('G', 'George'), ('R', 'Ringo')))
-<ul>
-<li><label><input type="checkbox" name="beatles" value="J" /> John</label></li>
-<li><label><input type="checkbox" name="beatles" value="P" /> Paul</label></li>
-<li><label><input type="checkbox" name="beatles" value="G" /> George</label></li>
-<li><label><input type="checkbox" name="beatles" value="R" /> Ringo</label></li>
-</ul>
-
-If multiple values are given, but some of them are not valid, the valid ones are selected:
->>> print w.render('beatles', ['J', 'G', 'foo'], choices=(('J', 'John'), ('P', 'Paul'), ('G', 'George'), ('R', 'Ringo')))
-<ul>
-<li><label><input checked="checked" type="checkbox" name="beatles" value="J" /> John</label></li>
-<li><label><input type="checkbox" name="beatles" value="P" /> Paul</label></li>
-<li><label><input checked="checked" type="checkbox" name="beatles" value="G" /> George</label></li>
-<li><label><input type="checkbox" name="beatles" value="R" /> Ringo</label></li>
-</ul>
-
-The value is compared to its str():
->>> print w.render('nums', [2], choices=[('1', '1'), ('2', '2'), ('3', '3')])
-<ul>
-<li><label><input type="checkbox" name="nums" value="1" /> 1</label></li>
-<li><label><input checked="checked" type="checkbox" name="nums" value="2" /> 2</label></li>
-<li><label><input type="checkbox" name="nums" value="3" /> 3</label></li>
-</ul>
->>> print w.render('nums', ['2'], choices=[(1, 1), (2, 2), (3, 3)])
-<ul>
-<li><label><input type="checkbox" name="nums" value="1" /> 1</label></li>
-<li><label><input checked="checked" type="checkbox" name="nums" value="2" /> 2</label></li>
-<li><label><input type="checkbox" name="nums" value="3" /> 3</label></li>
-</ul>
->>> print w.render('nums', [2], choices=[(1, 1), (2, 2), (3, 3)])
-<ul>
-<li><label><input type="checkbox" name="nums" value="1" /> 1</label></li>
-<li><label><input checked="checked" type="checkbox" name="nums" value="2" /> 2</label></li>
-<li><label><input type="checkbox" name="nums" value="3" /> 3</label></li>
-</ul>
-
-The 'choices' argument can be any iterable:
->>> def get_choices():
-...     for i in range(5):
-...         yield (i, i)
->>> print w.render('nums', [2], choices=get_choices())
-<ul>
-<li><label><input type="checkbox" name="nums" value="0" /> 0</label></li>
-<li><label><input type="checkbox" name="nums" value="1" /> 1</label></li>
-<li><label><input checked="checked" type="checkbox" name="nums" value="2" /> 2</label></li>
-<li><label><input type="checkbox" name="nums" value="3" /> 3</label></li>
-<li><label><input type="checkbox" name="nums" value="4" /> 4</label></li>
-</ul>
-
-You can also pass 'choices' to the constructor:
->>> w = CheckboxSelectMultiple(choices=[(1, 1), (2, 2), (3, 3)])
->>> print w.render('nums', [2])
-<ul>
-<li><label><input type="checkbox" name="nums" value="1" /> 1</label></li>
-<li><label><input checked="checked" type="checkbox" name="nums" value="2" /> 2</label></li>
-<li><label><input type="checkbox" name="nums" value="3" /> 3</label></li>
-</ul>
-
-If 'choices' is passed to both the constructor and render(), then they'll both be in the output:
->>> print w.render('nums', [2], choices=[(4, 4), (5, 5)])
-<ul>
-<li><label><input type="checkbox" name="nums" value="1" /> 1</label></li>
-<li><label><input checked="checked" type="checkbox" name="nums" value="2" /> 2</label></li>
-<li><label><input type="checkbox" name="nums" value="3" /> 3</label></li>
-<li><label><input type="checkbox" name="nums" value="4" /> 4</label></li>
-<li><label><input type="checkbox" name="nums" value="5" /> 5</label></li>
-</ul>
-
->>> w.render('nums', ['ŠĐĆŽćžšđ'], choices=[('ŠĐĆŽćžšđ', 'ŠĐabcĆŽćžšđ'), ('ćžšđ', 'abcćžšđ')])
-u'<ul>\n<li><label><input type="checkbox" name="nums" value="1" /> 1</label></li>\n<li><label><input type="checkbox" name="nums" value="2" /> 2</label></li>\n<li><label><input type="checkbox" name="nums" value="3" /> 3</label></li>\n<li><label><input checked="checked" type="checkbox" name="nums" value="\u0160\u0110\u0106\u017d\u0107\u017e\u0161\u0111" /> \u0160\u0110abc\u0106\u017d\u0107\u017e\u0161\u0111</label></li>\n<li><label><input type="checkbox" name="nums" value="\u0107\u017e\u0161\u0111" /> abc\u0107\u017e\u0161\u0111</label></li>\n</ul>'
-
-# MultiWidget #################################################################
-
->>> class MyMultiWidget(MultiWidget):
-...     def decompress(self, value):
-...         if value:
-...             return value.split('__')
-...         return ['', '']
-...     def format_output(self, rendered_widgets):
-...         return u'<br />'.join(rendered_widgets)
->>> w = MyMultiWidget(widgets=(TextInput(attrs={'class': 'big'}), TextInput(attrs={'class': 'small'})))
->>> w.render('name', ['john', 'lennon'])
-u'<input type="text" class="big" value="john" name="name_0" /><br /><input type="text" class="small" value="lennon" name="name_1" />'
->>> w.render('name', 'john__lennon')
-u'<input type="text" class="big" value="john" name="name_0" /><br /><input type="text" class="small" value="lennon" name="name_1" />'
-
-# SplitDateTimeWidget #########################################################
-
->>> w = SplitDateTimeWidget()
->>> w.render('date', '')
-u'<input type="text" name="date_0" /><input type="text" name="date_1" />'
->>> w.render('date', None)
-u'<input type="text" name="date_0" /><input type="text" name="date_1" />'
->>> w.render('date', datetime.datetime(2006, 1, 10, 7, 30))
-u'<input type="text" name="date_0" value="2006-01-10" /><input type="text" name="date_1" value="07:30:00" />'
->>> w.render('date', [datetime.date(2006, 1, 10), datetime.time(7, 30)])
-u'<input type="text" name="date_0" value="2006-01-10" /><input type="text" name="date_1" value="07:30:00" />'
-
-You can also pass 'attrs' to the constructor. In this case, the attrs will be
-included on both widgets.
->>> w = SplitDateTimeWidget(attrs={'class': 'pretty'})
->>> w.render('date', datetime.datetime(2006, 1, 10, 7, 30))
-u'<input type="text" class="pretty" value="2006-01-10" name="date_0" /><input type="text" class="pretty" value="07:30:00" name="date_1" />'
-
-##########
-# Fields #
-##########
-
-Each Field class does some sort of validation. Each Field has a clean() method,
-which either raises django.newforms.ValidationError or returns the "clean"
-data -- usually a Unicode object, but, in some rare cases, a list.
-
-Each Field's __init__() takes at least these parameters:
-    required -- Boolean that specifies whether the field is required.
-                True by default.
-    widget -- A Widget class, or instance of a Widget class, that should be
-              used for this Field when displaying it. Each Field has a default
-              Widget that it'll use if you don't specify this. In most cases,
-              the default widget is TextInput.
-    label -- A verbose name for this field, for use in displaying this field in
-             a form. By default, Django will use a "pretty" version of the form
-             field name, if the Field is part of a Form.
-    initial -- A value to use in this Field's initial display. This value is
-               *not* used as a fallback if data isn't given.
-
-Other than that, the Field subclasses have class-specific options for
-__init__(). For example, CharField has a max_length option.
-
-# CharField ###################################################################
-
->>> f = CharField()
->>> f.clean(1)
-u'1'
->>> f.clean('hello')
-u'hello'
->>> f.clean(None)
-Traceback (most recent call last):
-...
-ValidationError: [u'This field is required.']
->>> f.clean('')
-Traceback (most recent call last):
-...
-ValidationError: [u'This field is required.']
->>> f.clean([1, 2, 3])
-u'[1, 2, 3]'
-
->>> f = CharField(required=False)
->>> f.clean(1)
-u'1'
->>> f.clean('hello')
-u'hello'
->>> f.clean(None)
-u''
->>> f.clean('')
-u''
->>> f.clean([1, 2, 3])
-u'[1, 2, 3]'
-
-CharField accepts an optional max_length parameter:
->>> f = CharField(max_length=10, required=False)
->>> f.clean('12345')
-u'12345'
->>> f.clean('1234567890')
-u'1234567890'
->>> f.clean('1234567890a')
-Traceback (most recent call last):
-...
-ValidationError: [u'Ensure this value has at most 10 characters.']
-
-CharField accepts an optional min_length parameter:
->>> f = CharField(min_length=10, required=False)
->>> f.clean('')
-u''
->>> f.clean('12345')
-Traceback (most recent call last):
-...
-ValidationError: [u'Ensure this value has at least 10 characters.']
->>> f.clean('1234567890')
-u'1234567890'
->>> f.clean('1234567890a')
-u'1234567890a'
-
->>> f = CharField(min_length=10, required=True)
->>> f.clean('')
-Traceback (most recent call last):
-...
-ValidationError: [u'This field is required.']
->>> f.clean('12345')
-Traceback (most recent call last):
-...
-ValidationError: [u'Ensure this value has at least 10 characters.']
->>> f.clean('1234567890')
-u'1234567890'
->>> f.clean('1234567890a')
-u'1234567890a'
-
-# IntegerField ################################################################
-
->>> f = IntegerField()
->>> f.clean('')
-Traceback (most recent call last):
-...
-ValidationError: [u'This field is required.']
->>> f.clean(None)
-Traceback (most recent call last):
-...
-ValidationError: [u'This field is required.']
->>> f.clean('1')
-1
->>> isinstance(f.clean('1'), int)
-True
->>> f.clean('23')
-23
->>> f.clean('a')
-Traceback (most recent call last):
-...
-ValidationError: [u'Enter a whole number.']
->>> f.clean('1 ')
-1
->>> f.clean(' 1')
-1
->>> f.clean(' 1 ')
-1
->>> f.clean('1a')
-Traceback (most recent call last):
-...
-ValidationError: [u'Enter a whole number.']
-
->>> f = IntegerField(required=False)
->>> f.clean('')
->>> repr(f.clean(''))
-'None'
->>> f.clean(None)
->>> repr(f.clean(None))
-'None'
->>> f.clean('1')
-1
->>> isinstance(f.clean('1'), int)
-True
->>> f.clean('23')
-23
->>> f.clean('a')
-Traceback (most recent call last):
-...
-ValidationError: [u'Enter a whole number.']
->>> f.clean('1 ')
-1
->>> f.clean(' 1')
-1
->>> f.clean(' 1 ')
-1
->>> f.clean('1a')
-Traceback (most recent call last):
-...
-ValidationError: [u'Enter a whole number.']
-
-IntegerField accepts an optional max_value parameter:
->>> f = IntegerField(max_value=10)
->>> f.clean(None)
-Traceback (most recent call last):
-...
-ValidationError: [u'This field is required.']
->>> f.clean(1)
-1
->>> f.clean(10)
-10
->>> f.clean(11)
-Traceback (most recent call last):
-...
-ValidationError: [u'Ensure this value is less than or equal to 10.']
->>> f.clean('10')
-10
->>> f.clean('11')
-Traceback (most recent call last):
-...
-ValidationError: [u'Ensure this value is less than or equal to 10.']
-
-IntegerField accepts an optional min_value parameter:
->>> f = IntegerField(min_value=10)
->>> f.clean(None)
-Traceback (most recent call last):
-...
-ValidationError: [u'This field is required.']
->>> f.clean(1)
-Traceback (most recent call last):
-...
-ValidationError: [u'Ensure this value is greater than or equal to 10.']
->>> f.clean(10)
-10
->>> f.clean(11)
-11
->>> f.clean('10')
-10
->>> f.clean('11')
-11
-
-min_value and max_value can be used together:
->>> f = IntegerField(min_value=10, max_value=20)
->>> f.clean(None)
-Traceback (most recent call last):
-...
-ValidationError: [u'This field is required.']
->>> f.clean(1)
-Traceback (most recent call last):
-...
-ValidationError: [u'Ensure this value is greater than or equal to 10.']
->>> f.clean(10)
-10
->>> f.clean(11)
-11
->>> f.clean('10')
-10
->>> f.clean('11')
-11
->>> f.clean(20)
-20
->>> f.clean(21)
-Traceback (most recent call last):
-...
-ValidationError: [u'Ensure this value is less than or equal to 20.']
-
-# DateField ###################################################################
-
->>> import datetime
->>> f = DateField()
->>> f.clean(datetime.date(2006, 10, 25))
-datetime.date(2006, 10, 25)
->>> f.clean(datetime.datetime(2006, 10, 25, 14, 30))
-datetime.date(2006, 10, 25)
->>> f.clean(datetime.datetime(2006, 10, 25, 14, 30, 59))
-datetime.date(2006, 10, 25)
->>> f.clean(datetime.datetime(2006, 10, 25, 14, 30, 59, 200))
-datetime.date(2006, 10, 25)
->>> f.clean('2006-10-25')
-datetime.date(2006, 10, 25)
->>> f.clean('10/25/2006')
-datetime.date(2006, 10, 25)
->>> f.clean('10/25/06')
-datetime.date(2006, 10, 25)
->>> f.clean('Oct 25 2006')
-datetime.date(2006, 10, 25)
->>> f.clean('October 25 2006')
-datetime.date(2006, 10, 25)
->>> f.clean('October 25, 2006')
-datetime.date(2006, 10, 25)
->>> f.clean('25 October 2006')
-datetime.date(2006, 10, 25)
->>> f.clean('25 October, 2006')
-datetime.date(2006, 10, 25)
->>> f.clean('2006-4-31')
-Traceback (most recent call last):
-...
-ValidationError: [u'Enter a valid date.']
->>> f.clean('200a-10-25')
-Traceback (most recent call last):
-...
-ValidationError: [u'Enter a valid date.']
->>> f.clean('25/10/06')
-Traceback (most recent call last):
-...
-ValidationError: [u'Enter a valid date.']
->>> f.clean(None)
-Traceback (most recent call last):
-...
-ValidationError: [u'This field is required.']
-
->>> f = DateField(required=False)
->>> f.clean(None)
->>> repr(f.clean(None))
-'None'
->>> f.clean('')
->>> repr(f.clean(''))
-'None'
-
-DateField accepts an optional input_formats parameter:
->>> f = DateField(input_formats=['%Y %m %d'])
->>> f.clean(datetime.date(2006, 10, 25))
-datetime.date(2006, 10, 25)
->>> f.clean(datetime.datetime(2006, 10, 25, 14, 30))
-datetime.date(2006, 10, 25)
->>> f.clean('2006 10 25')
-datetime.date(2006, 10, 25)
-
-The input_formats parameter overrides all default input formats,
-so the default formats won't work unless you specify them:
->>> f.clean('2006-10-25')
-Traceback (most recent call last):
-...
-ValidationError: [u'Enter a valid date.']
->>> f.clean('10/25/2006')
-Traceback (most recent call last):
-...
-ValidationError: [u'Enter a valid date.']
->>> f.clean('10/25/06')
-Traceback (most recent call last):
-...
-ValidationError: [u'Enter a valid date.']
-
-# TimeField ###################################################################
-
->>> import datetime
->>> f = TimeField()
->>> f.clean(datetime.time(14, 25))
-datetime.time(14, 25)
->>> f.clean(datetime.time(14, 25, 59))
-datetime.time(14, 25, 59)
->>> f.clean('14:25')
-datetime.time(14, 25)
->>> f.clean('14:25:59')
-datetime.time(14, 25, 59)
->>> f.clean('hello')
-Traceback (most recent call last):
-...
-ValidationError: [u'Enter a valid time.']
->>> f.clean('1:24 p.m.')
-Traceback (most recent call last):
-...
-ValidationError: [u'Enter a valid time.']
-
-TimeField accepts an optional input_formats parameter:
->>> f = TimeField(input_formats=['%I:%M %p'])
->>> f.clean(datetime.time(14, 25))
-datetime.time(14, 25)
->>> f.clean(datetime.time(14, 25, 59))
-datetime.time(14, 25, 59)
->>> f.clean('4:25 AM')
-datetime.time(4, 25)
->>> f.clean('4:25 PM')
-datetime.time(16, 25)
-
-The input_formats parameter overrides all default input formats,
-so the default formats won't work unless you specify them:
->>> f.clean('14:30:45')
-Traceback (most recent call last):
-...
-ValidationError: [u'Enter a valid time.']
-
-# DateTimeField ###############################################################
-
->>> import datetime
->>> f = DateTimeField()
->>> f.clean(datetime.date(2006, 10, 25))
-datetime.datetime(2006, 10, 25, 0, 0)
->>> f.clean(datetime.datetime(2006, 10, 25, 14, 30))
-datetime.datetime(2006, 10, 25, 14, 30)
->>> f.clean(datetime.datetime(2006, 10, 25, 14, 30, 59))
-datetime.datetime(2006, 10, 25, 14, 30, 59)
->>> f.clean(datetime.datetime(2006, 10, 25, 14, 30, 59, 200))
-datetime.datetime(2006, 10, 25, 14, 30, 59, 200)
->>> f.clean('2006-10-25 14:30:45')
-datetime.datetime(2006, 10, 25, 14, 30, 45)
->>> f.clean('2006-10-25 14:30:00')
-datetime.datetime(2006, 10, 25, 14, 30)
->>> f.clean('2006-10-25 14:30')
-datetime.datetime(2006, 10, 25, 14, 30)
->>> f.clean('2006-10-25')
-datetime.datetime(2006, 10, 25, 0, 0)
->>> f.clean('10/25/2006 14:30:45')
-datetime.datetime(2006, 10, 25, 14, 30, 45)
->>> f.clean('10/25/2006 14:30:00')
-datetime.datetime(2006, 10, 25, 14, 30)
->>> f.clean('10/25/2006 14:30')
-datetime.datetime(2006, 10, 25, 14, 30)
->>> f.clean('10/25/2006')
-datetime.datetime(2006, 10, 25, 0, 0)
->>> f.clean('10/25/06 14:30:45')
-datetime.datetime(2006, 10, 25, 14, 30, 45)
->>> f.clean('10/25/06 14:30:00')
-datetime.datetime(2006, 10, 25, 14, 30)
->>> f.clean('10/25/06 14:30')
-datetime.datetime(2006, 10, 25, 14, 30)
->>> f.clean('10/25/06')
-datetime.datetime(2006, 10, 25, 0, 0)
->>> f.clean('hello')
-Traceback (most recent call last):
-...
-ValidationError: [u'Enter a valid date/time.']
->>> f.clean('2006-10-25 4:30 p.m.')
-Traceback (most recent call last):
-...
-ValidationError: [u'Enter a valid date/time.']
-
-DateField accepts an optional input_formats parameter:
->>> f = DateTimeField(input_formats=['%Y %m %d %I:%M %p'])
->>> f.clean(datetime.date(2006, 10, 25))
-datetime.datetime(2006, 10, 25, 0, 0)
->>> f.clean(datetime.datetime(2006, 10, 25, 14, 30))
-datetime.datetime(2006, 10, 25, 14, 30)
->>> f.clean(datetime.datetime(2006, 10, 25, 14, 30, 59))
-datetime.datetime(2006, 10, 25, 14, 30, 59)
->>> f.clean(datetime.datetime(2006, 10, 25, 14, 30, 59, 200))
-datetime.datetime(2006, 10, 25, 14, 30, 59, 200)
->>> f.clean('2006 10 25 2:30 PM')
-datetime.datetime(2006, 10, 25, 14, 30)
-
-The input_formats parameter overrides all default input formats,
-so the default formats won't work unless you specify them:
->>> f.clean('2006-10-25 14:30:45')
-Traceback (most recent call last):
-...
-ValidationError: [u'Enter a valid date/time.']
-
->>> f = DateTimeField(required=False)
->>> f.clean(None)
->>> repr(f.clean(None))
-'None'
->>> f.clean('')
->>> repr(f.clean(''))
-'None'
-
-# RegexField ##################################################################
-
->>> f = RegexField('^\d[A-F]\d$')
->>> f.clean('2A2')
-u'2A2'
->>> f.clean('3F3')
-u'3F3'
->>> f.clean('3G3')
-Traceback (most recent call last):
-...
-ValidationError: [u'Enter a valid value.']
->>> f.clean(' 2A2')
-Traceback (most recent call last):
-...
-ValidationError: [u'Enter a valid value.']
->>> f.clean('2A2 ')
-Traceback (most recent call last):
-...
-ValidationError: [u'Enter a valid value.']
->>> f.clean('')
-Traceback (most recent call last):
-...
-ValidationError: [u'This field is required.']
-
->>> f = RegexField('^\d[A-F]\d$', required=False)
->>> f.clean('2A2')
-u'2A2'
->>> f.clean('3F3')
-u'3F3'
->>> f.clean('3G3')
-Traceback (most recent call last):
-...
-ValidationError: [u'Enter a valid value.']
->>> f.clean('')
-u''
-
-Alternatively, RegexField can take a compiled regular expression:
->>> f = RegexField(re.compile('^\d[A-F]\d$'))
->>> f.clean('2A2')
-u'2A2'
->>> f.clean('3F3')
-u'3F3'
->>> f.clean('3G3')
-Traceback (most recent call last):
-...
-ValidationError: [u'Enter a valid value.']
->>> f.clean(' 2A2')
-Traceback (most recent call last):
-...
-ValidationError: [u'Enter a valid value.']
->>> f.clean('2A2 ')
-Traceback (most recent call last):
-...
-ValidationError: [u'Enter a valid value.']
-
-RegexField takes an optional error_message argument:
->>> f = RegexField('^\d\d\d\d$', error_message='Enter a four-digit number.')
->>> f.clean('1234')
-u'1234'
->>> f.clean('123')
-Traceback (most recent call last):
-...
-ValidationError: [u'Enter a four-digit number.']
->>> f.clean('abcd')
-Traceback (most recent call last):
-...
-ValidationError: [u'Enter a four-digit number.']
-
-RegexField also access min_length and max_length parameters, for convenience.
->>> f = RegexField('^\d+$', min_length=5, max_length=10)
->>> f.clean('123')
-Traceback (most recent call last):
-...
-ValidationError: [u'Ensure this value has at least 5 characters.']
->>> f.clean('abc')
-Traceback (most recent call last):
-...
-ValidationError: [u'Ensure this value has at least 5 characters.']
->>> f.clean('12345')
-u'12345'
->>> f.clean('1234567890')
-u'1234567890'
->>> f.clean('12345678901')
-Traceback (most recent call last):
-...
-ValidationError: [u'Ensure this value has at most 10 characters.']
->>> f.clean('12345a')
-Traceback (most recent call last):
-...
-ValidationError: [u'Enter a valid value.']
-
-# EmailField ##################################################################
-
->>> f = EmailField()
->>> f.clean('')
-Traceback (most recent call last):
-...
-ValidationError: [u'This field is required.']
->>> f.clean(None)
-Traceback (most recent call last):
-...
-ValidationError: [u'This field is required.']
->>> f.clean('person@example.com')
-u'person@example.com'
->>> f.clean('foo')
-Traceback (most recent call last):
-...
-ValidationError: [u'Enter a valid e-mail address.']
->>> f.clean('foo@')
-Traceback (most recent call last):
-...
-ValidationError: [u'Enter a valid e-mail address.']
->>> f.clean('foo@bar')
-Traceback (most recent call last):
-...
-ValidationError: [u'Enter a valid e-mail address.']
-
->>> f = EmailField(required=False)
->>> f.clean('')
-u''
->>> f.clean(None)
-u''
->>> f.clean('person@example.com')
-u'person@example.com'
->>> f.clean('foo')
-Traceback (most recent call last):
-...
-ValidationError: [u'Enter a valid e-mail address.']
->>> f.clean('foo@')
-Traceback (most recent call last):
-...
-ValidationError: [u'Enter a valid e-mail address.']
->>> f.clean('foo@bar')
-Traceback (most recent call last):
-...
-ValidationError: [u'Enter a valid e-mail address.']
-
-EmailField also access min_length and max_length parameters, for convenience.
->>> f = EmailField(min_length=10, max_length=15)
->>> f.clean('a@foo.com')
-Traceback (most recent call last):
-...
-ValidationError: [u'Ensure this value has at least 10 characters.']
->>> f.clean('alf@foo.com')
-u'alf@foo.com'
->>> f.clean('alf123456788@foo.com')
-Traceback (most recent call last):
-...
-ValidationError: [u'Ensure this value has at most 15 characters.']
-
-# URLField ##################################################################
-
->>> f = URLField()
->>> f.clean('')
-Traceback (most recent call last):
-...
-ValidationError: [u'This field is required.']
->>> f.clean(None)
-Traceback (most recent call last):
-...
-ValidationError: [u'This field is required.']
->>> f.clean('http://example.com')
-u'http://example.com'
->>> f.clean('http://www.example.com')
-u'http://www.example.com'
->>> f.clean('foo')
-Traceback (most recent call last):
-...
-ValidationError: [u'Enter a valid URL.']
->>> f.clean('example.com')
-Traceback (most recent call last):
-...
-ValidationError: [u'Enter a valid URL.']
->>> f.clean('http://')
-Traceback (most recent call last):
-...
-ValidationError: [u'Enter a valid URL.']
->>> f.clean('http://example')
-Traceback (most recent call last):
-...
-ValidationError: [u'Enter a valid URL.']
->>> f.clean('http://example.')
-Traceback (most recent call last):
-...
-ValidationError: [u'Enter a valid URL.']
->>> f.clean('http://.com')
-Traceback (most recent call last):
-...
-ValidationError: [u'Enter a valid URL.']
-
->>> f = URLField(required=False)
->>> f.clean('')
-u''
->>> f.clean(None)
-u''
->>> f.clean('http://example.com')
-u'http://example.com'
->>> f.clean('http://www.example.com')
-u'http://www.example.com'
->>> f.clean('foo')
-Traceback (most recent call last):
-...
-ValidationError: [u'Enter a valid URL.']
->>> f.clean('example.com')
-Traceback (most recent call last):
-...
-ValidationError: [u'Enter a valid URL.']
->>> f.clean('http://')
-Traceback (most recent call last):
-...
-ValidationError: [u'Enter a valid URL.']
->>> f.clean('http://example')
-Traceback (most recent call last):
-...
-ValidationError: [u'Enter a valid URL.']
->>> f.clean('http://example.')
-Traceback (most recent call last):
-...
-ValidationError: [u'Enter a valid URL.']
->>> f.clean('http://.com')
-Traceback (most recent call last):
-...
-ValidationError: [u'Enter a valid URL.']
-
-URLField takes an optional verify_exists parameter, which is False by default.
-This verifies that the URL is live on the Internet and doesn't return a 404 or 500:
->>> f = URLField(verify_exists=True)
->>> f.clean('http://www.google.com') # This will fail if there's no Internet connection
-u'http://www.google.com'
->>> f.clean('http://example')
-Traceback (most recent call last):
-...
-ValidationError: [u'Enter a valid URL.']
->>> f.clean('http://www.jfoiwjfoi23jfoijoaijfoiwjofiwjefewl.com') # bad domain
-Traceback (most recent call last):
-...
-ValidationError: [u'This URL appears to be a broken link.']
->>> f.clean('http://google.com/we-love-microsoft.html') # good domain, bad page
-Traceback (most recent call last):
-...
-ValidationError: [u'This URL appears to be a broken link.']
->>> f = URLField(verify_exists=True, required=False)
->>> f.clean('')
-u''
->>> f.clean('http://www.google.com') # This will fail if there's no Internet connection
-u'http://www.google.com'
-
-EmailField also access min_length and max_length parameters, for convenience.
->>> f = URLField(min_length=15, max_length=20)
->>> f.clean('http://f.com')
-Traceback (most recent call last):
-...
-ValidationError: [u'Ensure this value has at least 15 characters.']
->>> f.clean('http://example.com')
-u'http://example.com'
->>> f.clean('http://abcdefghijklmnopqrstuvwxyz.com')
-Traceback (most recent call last):
-...
-ValidationError: [u'Ensure this value has at most 20 characters.']
-
-# BooleanField ################################################################
-
->>> f = BooleanField()
->>> f.clean('')
-Traceback (most recent call last):
-...
-ValidationError: [u'This field is required.']
->>> f.clean(None)
-Traceback (most recent call last):
-...
-ValidationError: [u'This field is required.']
->>> f.clean(True)
-True
->>> f.clean(False)
-False
->>> f.clean(1)
-True
->>> f.clean(0)
-False
->>> f.clean('Django rocks')
-True
-
->>> f = BooleanField(required=False)
->>> f.clean('')
-False
->>> f.clean(None)
-False
->>> f.clean(True)
-True
->>> f.clean(False)
-False
->>> f.clean(1)
-True
->>> f.clean(0)
-False
->>> f.clean('Django rocks')
-True
-
-# ChoiceField #################################################################
-
->>> f = ChoiceField(choices=[('1', '1'), ('2', '2')])
->>> f.clean('')
-Traceback (most recent call last):
-...
-ValidationError: [u'This field is required.']
->>> f.clean(None)
-Traceback (most recent call last):
-...
-ValidationError: [u'This field is required.']
->>> f.clean(1)
-u'1'
->>> f.clean('1')
-u'1'
->>> f.clean('3')
-Traceback (most recent call last):
-...
-ValidationError: [u'Select a valid choice. That choice is not one of the available choices.']
-
->>> f = ChoiceField(choices=[('1', '1'), ('2', '2')], required=False)
->>> f.clean('')
-u''
->>> f.clean(None)
-u''
->>> f.clean(1)
-u'1'
->>> f.clean('1')
-u'1'
->>> f.clean('3')
-Traceback (most recent call last):
-...
-ValidationError: [u'Select a valid choice. That choice is not one of the available choices.']
-
->>> f = ChoiceField(choices=[('J', 'John'), ('P', 'Paul')])
->>> f.clean('J')
-u'J'
->>> f.clean('John')
-Traceback (most recent call last):
-...
-ValidationError: [u'Select a valid choice. That choice is not one of the available choices.']
-
-# NullBooleanField ############################################################
-
->>> f = NullBooleanField()
->>> f.clean('')
->>> f.clean(True)
-True
->>> f.clean(False)
-False
->>> f.clean(None)
->>> f.clean('1')
->>> f.clean('2')
->>> f.clean('3')
->>> f.clean('hello')
-
-# MultipleChoiceField #########################################################
-
->>> f = MultipleChoiceField(choices=[('1', '1'), ('2', '2')])
->>> f.clean('')
-Traceback (most recent call last):
-...
-ValidationError: [u'This field is required.']
->>> f.clean(None)
-Traceback (most recent call last):
-...
-ValidationError: [u'This field is required.']
->>> f.clean([1])
-[u'1']
->>> f.clean(['1'])
-[u'1']
->>> f.clean(['1', '2'])
-[u'1', u'2']
->>> f.clean([1, '2'])
-[u'1', u'2']
->>> f.clean((1, '2'))
-[u'1', u'2']
->>> f.clean('hello')
-Traceback (most recent call last):
-...
-ValidationError: [u'Enter a list of values.']
->>> f.clean([])
-Traceback (most recent call last):
-...
-ValidationError: [u'This field is required.']
->>> f.clean(())
-Traceback (most recent call last):
-...
-ValidationError: [u'This field is required.']
->>> f.clean(['3'])
-Traceback (most recent call last):
-...
-ValidationError: [u'Select a valid choice. 3 is not one of the available choices.']
-
->>> f = MultipleChoiceField(choices=[('1', '1'), ('2', '2')], required=False)
->>> f.clean('')
-[]
->>> f.clean(None)
-[]
->>> f.clean([1])
-[u'1']
->>> f.clean(['1'])
-[u'1']
->>> f.clean(['1', '2'])
-[u'1', u'2']
->>> f.clean([1, '2'])
-[u'1', u'2']
->>> f.clean((1, '2'))
-[u'1', u'2']
->>> f.clean('hello')
-Traceback (most recent call last):
-...
-ValidationError: [u'Enter a list of values.']
->>> f.clean([])
-[]
->>> f.clean(())
-[]
->>> f.clean(['3'])
-Traceback (most recent call last):
-...
-ValidationError: [u'Select a valid choice. 3 is not one of the available choices.']
-
-# ComboField ##################################################################
-
-ComboField takes a list of fields that should be used to validate a value,
-in that order.
->>> f = ComboField(fields=[CharField(max_length=20), EmailField()])
->>> f.clean('test@example.com')
-u'test@example.com'
->>> f.clean('longemailaddress@example.com')
-Traceback (most recent call last):
-...
-ValidationError: [u'Ensure this value has at most 20 characters.']
->>> f.clean('not an e-mail')
-Traceback (most recent call last):
-...
-ValidationError: [u'Enter a valid e-mail address.']
->>> f.clean('')
-Traceback (most recent call last):
-...
-ValidationError: [u'This field is required.']
->>> f.clean(None)
-Traceback (most recent call last):
-...
-ValidationError: [u'This field is required.']
-
->>> f = ComboField(fields=[CharField(max_length=20), EmailField()], required=False)
->>> f.clean('test@example.com')
-u'test@example.com'
->>> f.clean('longemailaddress@example.com')
-Traceback (most recent call last):
-...
-ValidationError: [u'Ensure this value has at most 20 characters.']
->>> f.clean('not an e-mail')
-Traceback (most recent call last):
-...
-ValidationError: [u'Enter a valid e-mail address.']
->>> f.clean('')
-u''
->>> f.clean(None)
-u''
-
-# SplitDateTimeField ##########################################################
-
->>> f = SplitDateTimeField()
->>> f.clean([datetime.date(2006, 1, 10), datetime.time(7, 30)])
-datetime.datetime(2006, 1, 10, 7, 30)
->>> f.clean(None)
-Traceback (most recent call last):
-...
-ValidationError: [u'This field is required.']
->>> f.clean('')
-Traceback (most recent call last):
-...
-ValidationError: [u'This field is required.']
->>> f.clean('hello')
-Traceback (most recent call last):
-...
-ValidationError: [u'Enter a list of values.']
->>> f.clean(['hello', 'there'])
-Traceback (most recent call last):
-...
-ValidationError: [u'Enter a valid date.', u'Enter a valid time.']
->>> f.clean(['2006-01-10', 'there'])
-Traceback (most recent call last):
-...
-ValidationError: [u'Enter a valid time.']
->>> f.clean(['hello', '07:30'])
-Traceback (most recent call last):
-...
-ValidationError: [u'Enter a valid date.']
-
->>> f = SplitDateTimeField(required=False)
->>> f.clean([datetime.date(2006, 1, 10), datetime.time(7, 30)])
-datetime.datetime(2006, 1, 10, 7, 30)
->>> f.clean(None)
->>> f.clean('')
->>> f.clean('hello')
-Traceback (most recent call last):
-...
-ValidationError: [u'Enter a list of values.']
->>> f.clean(['hello', 'there'])
-Traceback (most recent call last):
-...
-ValidationError: [u'Enter a valid date.', u'Enter a valid time.']
->>> f.clean(['2006-01-10', 'there'])
-Traceback (most recent call last):
-...
-ValidationError: [u'Enter a valid time.']
->>> f.clean(['hello', '07:30'])
-Traceback (most recent call last):
-...
-ValidationError: [u'Enter a valid date.']
-
-#########
-# Forms #
-#########
-
-A Form is a collection of Fields. It knows how to validate a set of data and it
-knows how to render itself in a couple of default ways (e.g., an HTML table).
-You can pass it data in __init__(), as a dictionary.
-
-# Form ########################################################################
-
->>> class Person(Form):
-...     first_name = CharField()
-...     last_name = CharField()
-...     birthday = DateField()
-
-Pass a dictionary to a Form's __init__().
->>> p = Person({'first_name': u'John', 'last_name': u'Lennon', 'birthday': u'1940-10-9'})
->>> p.is_bound
-True
->>> p.errors
-{}
->>> p.is_valid()
-True
->>> p.errors.as_ul()
-u''
->>> p.errors.as_text()
-u''
->>> p.clean_data
-{'first_name': u'John', 'last_name': u'Lennon', 'birthday': datetime.date(1940, 10, 9)}
->>> print p['first_name']
-<input type="text" name="first_name" value="John" id="id_first_name" />
->>> print p['last_name']
-<input type="text" name="last_name" value="Lennon" id="id_last_name" />
->>> print p['birthday']
-<input type="text" name="birthday" value="1940-10-9" id="id_birthday" />
->>> print p['nonexistentfield']
-Traceback (most recent call last):
-...
-KeyError: "Key 'nonexistentfield' not found in Form"
-
->>> for boundfield in p:
-...     print boundfield
-<input type="text" name="first_name" value="John" id="id_first_name" />
-<input type="text" name="last_name" value="Lennon" id="id_last_name" />
-<input type="text" name="birthday" value="1940-10-9" id="id_birthday" />
->>> for boundfield in p:
-...     print boundfield.label, boundfield.data
-First name John
-Last name Lennon
-Birthday 1940-10-9
->>> print p
-<tr><th><label for="id_first_name">First name:</label></th><td><input type="text" name="first_name" value="John" id="id_first_name" /></td></tr>
-<tr><th><label for="id_last_name">Last name:</label></th><td><input type="text" name="last_name" value="Lennon" id="id_last_name" /></td></tr>
-<tr><th><label for="id_birthday">Birthday:</label></th><td><input type="text" name="birthday" value="1940-10-9" id="id_birthday" /></td></tr>
-
-Empty dictionaries are valid, too.
->>> p = Person({})
->>> p.is_bound
-True
->>> p.errors
-{'first_name': [u'This field is required.'], 'last_name': [u'This field is required.'], 'birthday': [u'This field is required.']}
->>> p.is_valid()
-False
->>> p.clean_data
-Traceback (most recent call last):
-...
-AttributeError: 'Person' object has no attribute 'clean_data'
->>> print p
-<tr><th><label for="id_first_name">First name:</label></th><td><ul class="errorlist"><li>This field is required.</li></ul><input type="text" name="first_name" id="id_first_name" /></td></tr>
-<tr><th><label for="id_last_name">Last name:</label></th><td><ul class="errorlist"><li>This field is required.</li></ul><input type="text" name="last_name" id="id_last_name" /></td></tr>
-<tr><th><label for="id_birthday">Birthday:</label></th><td><ul class="errorlist"><li>This field is required.</li></ul><input type="text" name="birthday" id="id_birthday" /></td></tr>
->>> print p.as_table()
-<tr><th><label for="id_first_name">First name:</label></th><td><ul class="errorlist"><li>This field is required.</li></ul><input type="text" name="first_name" id="id_first_name" /></td></tr>
-<tr><th><label for="id_last_name">Last name:</label></th><td><ul class="errorlist"><li>This field is required.</li></ul><input type="text" name="last_name" id="id_last_name" /></td></tr>
-<tr><th><label for="id_birthday">Birthday:</label></th><td><ul class="errorlist"><li>This field is required.</li></ul><input type="text" name="birthday" id="id_birthday" /></td></tr>
->>> print p.as_ul()
-<li><ul class="errorlist"><li>This field is required.</li></ul><label for="id_first_name">First name:</label> <input type="text" name="first_name" id="id_first_name" /></li>
-<li><ul class="errorlist"><li>This field is required.</li></ul><label for="id_last_name">Last name:</label> <input type="text" name="last_name" id="id_last_name" /></li>
-<li><ul class="errorlist"><li>This field is required.</li></ul><label for="id_birthday">Birthday:</label> <input type="text" name="birthday" id="id_birthday" /></li>
->>> print p.as_p()
-<p><ul class="errorlist"><li>This field is required.</li></ul></p>
-<p><label for="id_first_name">First name:</label> <input type="text" name="first_name" id="id_first_name" /></p>
-<p><ul class="errorlist"><li>This field is required.</li></ul></p>
-<p><label for="id_last_name">Last name:</label> <input type="text" name="last_name" id="id_last_name" /></p>
-<p><ul class="errorlist"><li>This field is required.</li></ul></p>
-<p><label for="id_birthday">Birthday:</label> <input type="text" name="birthday" id="id_birthday" /></p>
-
-If you don't pass any values to the Form's __init__(), or if you pass None,
-the Form will be considered unbound and won't do any validation. Form.errors
-will be an empty dictionary *but* Form.is_valid() will return False.
->>> p = Person()
->>> p.is_bound
-False
->>> p.errors
-{}
->>> p.is_valid()
-False
->>> p.clean_data
-Traceback (most recent call last):
-...
-AttributeError: 'Person' object has no attribute 'clean_data'
->>> print p
-<tr><th><label for="id_first_name">First name:</label></th><td><input type="text" name="first_name" id="id_first_name" /></td></tr>
-<tr><th><label for="id_last_name">Last name:</label></th><td><input type="text" name="last_name" id="id_last_name" /></td></tr>
-<tr><th><label for="id_birthday">Birthday:</label></th><td><input type="text" name="birthday" id="id_birthday" /></td></tr>
->>> print p.as_table()
-<tr><th><label for="id_first_name">First name:</label></th><td><input type="text" name="first_name" id="id_first_name" /></td></tr>
-<tr><th><label for="id_last_name">Last name:</label></th><td><input type="text" name="last_name" id="id_last_name" /></td></tr>
-<tr><th><label for="id_birthday">Birthday:</label></th><td><input type="text" name="birthday" id="id_birthday" /></td></tr>
->>> print p.as_ul()
-<li><label for="id_first_name">First name:</label> <input type="text" name="first_name" id="id_first_name" /></li>
-<li><label for="id_last_name">Last name:</label> <input type="text" name="last_name" id="id_last_name" /></li>
-<li><label for="id_birthday">Birthday:</label> <input type="text" name="birthday" id="id_birthday" /></li>
->>> print p.as_p()
-<p><label for="id_first_name">First name:</label> <input type="text" name="first_name" id="id_first_name" /></p>
-<p><label for="id_last_name">Last name:</label> <input type="text" name="last_name" id="id_last_name" /></p>
-<p><label for="id_birthday">Birthday:</label> <input type="text" name="birthday" id="id_birthday" /></p>
-
-Unicode values are handled properly.
->>> p = Person({'first_name': u'John', 'last_name': u'\u0160\u0110\u0106\u017d\u0107\u017e\u0161\u0111', 'birthday': '1940-10-9'})
->>> p.as_table()
-u'<tr><th><label for="id_first_name">First name:</label></th><td><input type="text" name="first_name" value="John" id="id_first_name" /></td></tr>\n<tr><th><label for="id_last_name">Last name:</label></th><td><input type="text" name="last_name" value="\u0160\u0110\u0106\u017d\u0107\u017e\u0161\u0111" id="id_last_name" /></td></tr>\n<tr><th><label for="id_birthday">Birthday:</label></th><td><input type="text" name="birthday" value="1940-10-9" id="id_birthday" /></td></tr>'
->>> p.as_ul()
-u'<li><label for="id_first_name">First name:</label> <input type="text" name="first_name" value="John" id="id_first_name" /></li>\n<li><label for="id_last_name">Last name:</label> <input type="text" name="last_name" value="\u0160\u0110\u0106\u017d\u0107\u017e\u0161\u0111" id="id_last_name" /></li>\n<li><label for="id_birthday">Birthday:</label> <input type="text" name="birthday" value="1940-10-9" id="id_birthday" /></li>'
->>> p.as_p()
-u'<p><label for="id_first_name">First name:</label> <input type="text" name="first_name" value="John" id="id_first_name" /></p>\n<p><label for="id_last_name">Last name:</label> <input type="text" name="last_name" value="\u0160\u0110\u0106\u017d\u0107\u017e\u0161\u0111" id="id_last_name" /></p>\n<p><label for="id_birthday">Birthday:</label> <input type="text" name="birthday" value="1940-10-9" id="id_birthday" /></p>'
-
->>> p = Person({'last_name': u'Lennon'})
->>> p.errors
-{'first_name': [u'This field is required.'], 'birthday': [u'This field is required.']}
->>> p.is_valid()
-False
->>> p.errors.as_ul()
-u'<ul class="errorlist"><li>first_name<ul class="errorlist"><li>This field is required.</li></ul></li><li>birthday<ul class="errorlist"><li>This field is required.</li></ul></li></ul>'
->>> print p.errors.as_text()
-* first_name
-  * This field is required.
-* birthday
-  * This field is required.
->>> p.clean_data
-Traceback (most recent call last):
-...
-AttributeError: 'Person' object has no attribute 'clean_data'
->>> p['first_name'].errors
-[u'This field is required.']
->>> p['first_name'].errors.as_ul()
-u'<ul class="errorlist"><li>This field is required.</li></ul>'
->>> p['first_name'].errors.as_text()
-u'* This field is required.'
-
->>> p = Person()
->>> print p['first_name']
-<input type="text" name="first_name" id="id_first_name" />
->>> print p['last_name']
-<input type="text" name="last_name" id="id_last_name" />
->>> print p['birthday']
-<input type="text" name="birthday" id="id_birthday" />
-
-clean_data will always *only* contain a key for fields defined in the
-Form, even if you pass extra data when you define the Form. In this
-example, we pass a bunch of extra fields to the form constructor,
-but clean_data contains only the form's fields.
->>> data = {'first_name': u'John', 'last_name': u'Lennon', 'birthday': u'1940-10-9', 'extra1': 'hello', 'extra2': 'hello'}
->>> p = Person(data)
->>> p.is_valid()
-True
->>> p.clean_data
-{'first_name': u'John', 'last_name': u'Lennon', 'birthday': datetime.date(1940, 10, 9)}
-
-"auto_id" tells the Form to add an "id" attribute to each form element.
-If it's a string that contains '%s', Django will use that as a format string
-into which the field's name will be inserted. It will also put a <label> around
-the human-readable labels for a field.
->>> p = Person(auto_id='%s_id')
->>> print p.as_table()
-<tr><th><label for="first_name_id">First name:</label></th><td><input type="text" name="first_name" id="first_name_id" /></td></tr>
-<tr><th><label for="last_name_id">Last name:</label></th><td><input type="text" name="last_name" id="last_name_id" /></td></tr>
-<tr><th><label for="birthday_id">Birthday:</label></th><td><input type="text" name="birthday" id="birthday_id" /></td></tr>
->>> print p.as_ul()
-<li><label for="first_name_id">First name:</label> <input type="text" name="first_name" id="first_name_id" /></li>
-<li><label for="last_name_id">Last name:</label> <input type="text" name="last_name" id="last_name_id" /></li>
-<li><label for="birthday_id">Birthday:</label> <input type="text" name="birthday" id="birthday_id" /></li>
->>> print p.as_p()
-<p><label for="first_name_id">First name:</label> <input type="text" name="first_name" id="first_name_id" /></p>
-<p><label for="last_name_id">Last name:</label> <input type="text" name="last_name" id="last_name_id" /></p>
-<p><label for="birthday_id">Birthday:</label> <input type="text" name="birthday" id="birthday_id" /></p>
-
-If auto_id is any True value whose str() does not contain '%s', the "id"
-attribute will be the name of the field.
->>> p = Person(auto_id=True)
->>> print p.as_ul()
-<li><label for="first_name">First name:</label> <input type="text" name="first_name" id="first_name" /></li>
-<li><label for="last_name">Last name:</label> <input type="text" name="last_name" id="last_name" /></li>
-<li><label for="birthday">Birthday:</label> <input type="text" name="birthday" id="birthday" /></li>
-
-If auto_id is any False value, an "id" attribute won't be output unless it
-was manually entered.
->>> p = Person(auto_id=False)
->>> print p.as_ul()
-<li>First name: <input type="text" name="first_name" /></li>
-<li>Last name: <input type="text" name="last_name" /></li>
-<li>Birthday: <input type="text" name="birthday" /></li>
-
-In this example, auto_id is False, but the "id" attribute for the "first_name"
-field is given. Also note that field gets a <label>, while the others don't.
->>> class PersonNew(Form):
-...     first_name = CharField(widget=TextInput(attrs={'id': 'first_name_id'}))
-...     last_name = CharField()
-...     birthday = DateField()
->>> p = PersonNew(auto_id=False)
->>> print p.as_ul()
-<li><label for="first_name_id">First name:</label> <input type="text" id="first_name_id" name="first_name" /></li>
-<li>Last name: <input type="text" name="last_name" /></li>
-<li>Birthday: <input type="text" name="birthday" /></li>
-
-If the "id" attribute is specified in the Form and auto_id is True, the "id"
-attribute in the Form gets precedence.
->>> p = PersonNew(auto_id=True)
->>> print p.as_ul()
-<li><label for="first_name_id">First name:</label> <input type="text" id="first_name_id" name="first_name" /></li>
-<li><label for="last_name">Last name:</label> <input type="text" name="last_name" id="last_name" /></li>
-<li><label for="birthday">Birthday:</label> <input type="text" name="birthday" id="birthday" /></li>
-
->>> class SignupForm(Form):
-...     email = EmailField()
-...     get_spam = BooleanField()
->>> f = SignupForm(auto_id=False)
->>> print f['email']
-<input type="text" name="email" />
->>> print f['get_spam']
-<input type="checkbox" name="get_spam" />
-
->>> f = SignupForm({'email': 'test@example.com', 'get_spam': True}, auto_id=False)
->>> print f['email']
-<input type="text" name="email" value="test@example.com" />
->>> print f['get_spam']
-<input checked="checked" type="checkbox" name="get_spam" />
-
-Any Field can have a Widget class passed to its constructor:
->>> class ContactForm(Form):
-...     subject = CharField()
-...     message = CharField(widget=Textarea)
->>> f = ContactForm(auto_id=False)
->>> print f['subject']
-<input type="text" name="subject" />
->>> print f['message']
-<textarea name="message"></textarea>
-
-as_textarea(), as_text() and as_hidden() are shortcuts for changing the output
-widget type:
->>> f['subject'].as_textarea()
-u'<textarea name="subject"></textarea>'
->>> f['message'].as_text()
-u'<input type="text" name="message" />'
->>> f['message'].as_hidden()
-u'<input type="hidden" name="message" />'
-
-The 'widget' parameter to a Field can also be an instance:
->>> class ContactForm(Form):
-...     subject = CharField()
-...     message = CharField(widget=Textarea(attrs={'rows': 80, 'cols': 20}))
->>> f = ContactForm(auto_id=False)
->>> print f['message']
-<textarea rows="80" cols="20" name="message"></textarea>
-
-Instance-level attrs are *not* carried over to as_textarea(), as_text() and
-as_hidden():
->>> f['message'].as_text()
-u'<input type="text" name="message" />'
->>> f = ContactForm({'subject': 'Hello', 'message': 'I love you.'}, auto_id=False)
->>> f['subject'].as_textarea()
-u'<textarea name="subject">Hello</textarea>'
->>> f['message'].as_text()
-u'<input type="text" name="message" value="I love you." />'
->>> f['message'].as_hidden()
-u'<input type="hidden" name="message" value="I love you." />'
-
-For a form with a <select>, use ChoiceField:
->>> class FrameworkForm(Form):
-...     name = CharField()
-...     language = ChoiceField(choices=[('P', 'Python'), ('J', 'Java')])
->>> f = FrameworkForm(auto_id=False)
->>> print f['language']
-<select name="language">
-<option value="P">Python</option>
-<option value="J">Java</option>
-</select>
->>> f = FrameworkForm({'name': 'Django', 'language': 'P'}, auto_id=False)
->>> print f['language']
-<select name="language">
-<option value="P" selected="selected">Python</option>
-<option value="J">Java</option>
-</select>
-
-A subtlety: If one of the choices' value is the empty string and the form is
-unbound, then the <option> for the empty-string choice will get selected="selected".
->>> class FrameworkForm(Form):
-...     name = CharField()
-...     language = ChoiceField(choices=[('', '------'), ('P', 'Python'), ('J', 'Java')])
->>> f = FrameworkForm(auto_id=False)
->>> print f['language']
-<select name="language">
-<option value="" selected="selected">------</option>
-<option value="P">Python</option>
-<option value="J">Java</option>
-</select>
-
-You can specify widget attributes in the Widget constructor.
->>> class FrameworkForm(Form):
-...     name = CharField()
-...     language = ChoiceField(choices=[('P', 'Python'), ('J', 'Java')], widget=Select(attrs={'class': 'foo'}))
->>> f = FrameworkForm(auto_id=False)
->>> print f['language']
-<select class="foo" name="language">
-<option value="P">Python</option>
-<option value="J">Java</option>
-</select>
->>> f = FrameworkForm({'name': 'Django', 'language': 'P'}, auto_id=False)
->>> print f['language']
-<select class="foo" name="language">
-<option value="P" selected="selected">Python</option>
-<option value="J">Java</option>
-</select>
-
-When passing a custom widget instance to ChoiceField, note that setting
-'choices' on the widget is meaningless. The widget will use the choices
-defined on the Field, not the ones defined on the Widget.
->>> class FrameworkForm(Form):
-...     name = CharField()
-...     language = ChoiceField(choices=[('P', 'Python'), ('J', 'Java')], widget=Select(choices=[('R', 'Ruby'), ('P', 'Perl')], attrs={'class': 'foo'}))
->>> f = FrameworkForm(auto_id=False)
->>> print f['language']
-<select class="foo" name="language">
-<option value="P">Python</option>
-<option value="J">Java</option>
-</select>
->>> f = FrameworkForm({'name': 'Django', 'language': 'P'}, auto_id=False)
->>> print f['language']
-<select class="foo" name="language">
-<option value="P" selected="selected">Python</option>
-<option value="J">Java</option>
-</select>
-
-You can set a ChoiceField's choices after the fact.
->>> class FrameworkForm(Form):
-...     name = CharField()
-...     language = ChoiceField()
->>> f = FrameworkForm(auto_id=False)
->>> print f['language']
-<select name="language">
-</select>
->>> f.fields['language'].choices = [('P', 'Python'), ('J', 'Java')]
->>> print f['language']
-<select name="language">
-<option value="P">Python</option>
-<option value="J">Java</option>
-</select>
-
-Add widget=RadioSelect to use that widget with a ChoiceField.
->>> class FrameworkForm(Form):
-...     name = CharField()
-...     language = ChoiceField(choices=[('P', 'Python'), ('J', 'Java')], widget=RadioSelect)
->>> f = FrameworkForm(auto_id=False)
->>> print f['language']
-<ul>
-<li><label><input type="radio" name="language" value="P" /> Python</label></li>
-<li><label><input type="radio" name="language" value="J" /> Java</label></li>
-</ul>
->>> print f
-<tr><th>Name:</th><td><input type="text" name="name" /></td></tr>
-<tr><th>Language:</th><td><ul>
-<li><label><input type="radio" name="language" value="P" /> Python</label></li>
-<li><label><input type="radio" name="language" value="J" /> Java</label></li>
-</ul></td></tr>
->>> print f.as_ul()
-<li>Name: <input type="text" name="name" /></li>
-<li>Language: <ul>
-<li><label><input type="radio" name="language" value="P" /> Python</label></li>
-<li><label><input type="radio" name="language" value="J" /> Java</label></li>
-</ul></li>
-
-Regarding auto_id and <label>, RadioSelect is a special case. Each radio button
-gets a distinct ID, formed by appending an underscore plus the button's
-zero-based index.
->>> f = FrameworkForm(auto_id='id_%s')
->>> print f['language']
-<ul>
-<li><label><input type="radio" id="id_language_0" value="P" name="language" /> Python</label></li>
-<li><label><input type="radio" id="id_language_1" value="J" name="language" /> Java</label></li>
-</ul>
-
-When RadioSelect is used with auto_id, and the whole form is printed using
-either as_table() or as_ul(), the label for the RadioSelect will point to the
-ID of the *first* radio button.
->>> print f
-<tr><th><label for="id_name">Name:</label></th><td><input type="text" name="name" id="id_name" /></td></tr>
-<tr><th><label for="id_language_0">Language:</label></th><td><ul>
-<li><label><input type="radio" id="id_language_0" value="P" name="language" /> Python</label></li>
-<li><label><input type="radio" id="id_language_1" value="J" name="language" /> Java</label></li>
-</ul></td></tr>
->>> print f.as_ul()
-<li><label for="id_name">Name:</label> <input type="text" name="name" id="id_name" /></li>
-<li><label for="id_language_0">Language:</label> <ul>
-<li><label><input type="radio" id="id_language_0" value="P" name="language" /> Python</label></li>
-<li><label><input type="radio" id="id_language_1" value="J" name="language" /> Java</label></li>
-</ul></li>
->>> print f.as_p()
-<p><label for="id_name">Name:</label> <input type="text" name="name" id="id_name" /></p>
-<p><label for="id_language_0">Language:</label> <ul>
-<li><label><input type="radio" id="id_language_0" value="P" name="language" /> Python</label></li>
-<li><label><input type="radio" id="id_language_1" value="J" name="language" /> Java</label></li>
-</ul></p>
-
-MultipleChoiceField is a special case, as its data is required to be a list:
->>> class SongForm(Form):
-...     name = CharField()
-...     composers = MultipleChoiceField()
->>> f = SongForm(auto_id=False)
->>> print f['composers']
-<select multiple="multiple" name="composers">
-</select>
->>> class SongForm(Form):
-...     name = CharField()
-...     composers = MultipleChoiceField(choices=[('J', 'John Lennon'), ('P', 'Paul McCartney')])
->>> f = SongForm(auto_id=False)
->>> print f['composers']
-<select multiple="multiple" name="composers">
-<option value="J">John Lennon</option>
-<option value="P">Paul McCartney</option>
-</select>
->>> f = SongForm({'name': 'Yesterday', 'composers': ['P']}, auto_id=False)
->>> print f['name']
-<input type="text" name="name" value="Yesterday" />
->>> print f['composers']
-<select multiple="multiple" name="composers">
-<option value="J">John Lennon</option>
-<option value="P" selected="selected">Paul McCartney</option>
-</select>
-
-MultipleChoiceField rendered as_hidden() is a special case. Because it can
-have multiple values, its as_hidden() renders multiple <input type="hidden">
-tags.
->>> f = SongForm({'name': 'Yesterday', 'composers': ['P']}, auto_id=False)
->>> print f['composers'].as_hidden()
-<input type="hidden" name="composers" value="P" />
->>> f = SongForm({'name': 'From Me To You', 'composers': ['P', 'J']}, auto_id=False)
->>> print f['composers'].as_hidden()
-<input type="hidden" name="composers" value="P" />
-<input type="hidden" name="composers" value="J" />
-
-MultipleChoiceField can also be used with the CheckboxSelectMultiple widget.
->>> class SongForm(Form):
-...     name = CharField()
-...     composers = MultipleChoiceField(choices=[('J', 'John Lennon'), ('P', 'Paul McCartney')], widget=CheckboxSelectMultiple)
->>> f = SongForm(auto_id=False)
->>> print f['composers']
-<ul>
-<li><label><input type="checkbox" name="composers" value="J" /> John Lennon</label></li>
-<li><label><input type="checkbox" name="composers" value="P" /> Paul McCartney</label></li>
-</ul>
->>> f = SongForm({'composers': ['J']}, auto_id=False)
->>> print f['composers']
-<ul>
-<li><label><input checked="checked" type="checkbox" name="composers" value="J" /> John Lennon</label></li>
-<li><label><input type="checkbox" name="composers" value="P" /> Paul McCartney</label></li>
-</ul>
->>> f = SongForm({'composers': ['J', 'P']}, auto_id=False)
->>> print f['composers']
-<ul>
-<li><label><input checked="checked" type="checkbox" name="composers" value="J" /> John Lennon</label></li>
-<li><label><input checked="checked" type="checkbox" name="composers" value="P" /> Paul McCartney</label></li>
-</ul>
-
-Regarding auto_id, CheckboxSelectMultiple is a special case. Each checkbox
-gets a distinct ID, formed by appending an underscore plus the checkbox's
-zero-based index.
->>> f = SongForm(auto_id='%s_id')
->>> print f['composers']
-<ul>
-<li><label><input type="checkbox" name="composers" value="J" id="composers_id_0" /> John Lennon</label></li>
-<li><label><input type="checkbox" name="composers" value="P" id="composers_id_1" /> Paul McCartney</label></li>
-</ul>
-
-Data for a MultipleChoiceField should be a list. QueryDict and MultiValueDict
-conveniently work with this.
->>> data = {'name': 'Yesterday', 'composers': ['J', 'P']}
->>> f = SongForm(data)
->>> f.errors
-{}
->>> from django.http import QueryDict
->>> data = QueryDict('name=Yesterday&composers=J&composers=P')
->>> f = SongForm(data)
->>> f.errors
-{}
->>> from django.utils.datastructures import MultiValueDict
->>> data = MultiValueDict(dict(name=['Yesterday'], composers=['J', 'P']))
->>> f = SongForm(data)
->>> f.errors
-{}
-
-The MultipleHiddenInput widget renders multiple values as hidden fields.
->>> class SongFormHidden(Form):
-...     name = CharField()
-...     composers = MultipleChoiceField(choices=[('J', 'John Lennon'), ('P', 'Paul McCartney')], widget=MultipleHiddenInput)
->>> f = SongFormHidden(MultiValueDict(dict(name=['Yesterday'], composers=['J', 'P'])), auto_id=False)
->>> print f.as_ul()
-<li>Name: <input type="text" name="name" value="Yesterday" /><input type="hidden" name="composers" value="J" />
-<input type="hidden" name="composers" value="P" /></li>
-
-When using CheckboxSelectMultiple, the framework expects a list of input and
-returns a list of input.
->>> f = SongForm({'name': 'Yesterday'}, auto_id=False)
->>> f.errors
-{'composers': [u'This field is required.']}
->>> f = SongForm({'name': 'Yesterday', 'composers': ['J']}, auto_id=False)
->>> f.errors
-{}
->>> f.clean_data
-{'composers': [u'J'], 'name': u'Yesterday'}
->>> f = SongForm({'name': 'Yesterday', 'composers': ['J', 'P']}, auto_id=False)
->>> f.errors
-{}
->>> f.clean_data
-{'composers': [u'J', u'P'], 'name': u'Yesterday'}
-
-Validation errors are HTML-escaped when output as HTML.
->>> class EscapingForm(Form):
-...     special_name = CharField()
-...     def clean_special_name(self):
-...         raise ValidationError("Something's wrong with '%s'" % self.clean_data['special_name'])
-
->>> f = EscapingForm({'special_name': "Nothing to escape"}, auto_id=False)
->>> print f
-<tr><th>Special name:</th><td><ul class="errorlist"><li>Something&#39;s wrong with &#39;Nothing to escape&#39;</li></ul><input type="text" name="special_name" value="Nothing to escape" /></td></tr>
->>> f = EscapingForm({'special_name': "Should escape < & > and <script>alert('xss')</script>"}, auto_id=False)
->>> print f
-<tr><th>Special name:</th><td><ul class="errorlist"><li>Something&#39;s wrong with &#39;Should escape &lt; &amp; &gt; and &lt;script&gt;alert(&#39;xss&#39;)&lt;/script&gt;&#39;</li></ul><input type="text" name="special_name" value="Should escape &lt; &amp; &gt; and &lt;script&gt;alert(&#39;xss&#39;)&lt;/script&gt;" /></td></tr>
-
-# Validating multiple fields in relation to another ###########################
-
-There are a couple of ways to do multiple-field validation. If you want the
-validation message to be associated with a particular field, implement the
-clean_XXX() method on the Form, where XXX is the field name. As in
-Field.clean(), the clean_XXX() method should return the cleaned value. In the
-clean_XXX() method, you have access to self.clean_data, which is a dictionary
-of all the data that has been cleaned *so far*, in order by the fields,
-including the current field (e.g., the field XXX if you're in clean_XXX()).
->>> class UserRegistration(Form):
-...    username = CharField(max_length=10)
-...    password1 = CharField(widget=PasswordInput)
-...    password2 = CharField(widget=PasswordInput)
-...    def clean_password2(self):
-...        if self.clean_data.get('password1') and self.clean_data.get('password2') and self.clean_data['password1'] != self.clean_data['password2']:
-...            raise ValidationError(u'Please make sure your passwords match.')
-...        return self.clean_data['password2']
->>> f = UserRegistration(auto_id=False)
->>> f.errors
-{}
->>> f = UserRegistration({}, auto_id=False)
->>> f.errors
-{'username': [u'This field is required.'], 'password1': [u'This field is required.'], 'password2': [u'This field is required.']}
->>> f = UserRegistration({'username': 'adrian', 'password1': 'foo', 'password2': 'bar'}, auto_id=False)
->>> f.errors
-{'password2': [u'Please make sure your passwords match.']}
->>> f = UserRegistration({'username': 'adrian', 'password1': 'foo', 'password2': 'foo'}, auto_id=False)
->>> f.errors
-{}
->>> f.clean_data
-{'username': u'adrian', 'password1': u'foo', 'password2': u'foo'}
-
-Another way of doing multiple-field validation is by implementing the
-Form's clean() method. If you do this, any ValidationError raised by that
-method will not be associated with a particular field; it will have a
-special-case association with the field named '__all__'.
-Note that in Form.clean(), you have access to self.clean_data, a dictionary of
-all the fields/values that have *not* raised a ValidationError. Also note
-Form.clean() is required to return a dictionary of all clean data.
->>> class UserRegistration(Form):
-...    username = CharField(max_length=10)
-...    password1 = CharField(widget=PasswordInput)
-...    password2 = CharField(widget=PasswordInput)
-...    def clean(self):
-...        if self.clean_data.get('password1') and self.clean_data.get('password2') and self.clean_data['password1'] != self.clean_data['password2']:
-...            raise ValidationError(u'Please make sure your passwords match.')
-...        return self.clean_data
->>> f = UserRegistration(auto_id=False)
->>> f.errors
-{}
->>> f = UserRegistration({}, auto_id=False)
->>> print f.as_table()
-<tr><th>Username:</th><td><ul class="errorlist"><li>This field is required.</li></ul><input type="text" name="username" maxlength="10" /></td></tr>
-<tr><th>Password1:</th><td><ul class="errorlist"><li>This field is required.</li></ul><input type="password" name="password1" /></td></tr>
-<tr><th>Password2:</th><td><ul class="errorlist"><li>This field is required.</li></ul><input type="password" name="password2" /></td></tr>
->>> f.errors
-{'username': [u'This field is required.'], 'password1': [u'This field is required.'], 'password2': [u'This field is required.']}
->>> f = UserRegistration({'username': 'adrian', 'password1': 'foo', 'password2': 'bar'}, auto_id=False)
->>> f.errors
-{'__all__': [u'Please make sure your passwords match.']}
->>> print f.as_table()
-<tr><td colspan="2"><ul class="errorlist"><li>Please make sure your passwords match.</li></ul></td></tr>
-<tr><th>Username:</th><td><input type="text" name="username" value="adrian" maxlength="10" /></td></tr>
-<tr><th>Password1:</th><td><input type="password" name="password1" value="foo" /></td></tr>
-<tr><th>Password2:</th><td><input type="password" name="password2" value="bar" /></td></tr>
->>> print f.as_ul()
-<li><ul class="errorlist"><li>Please make sure your passwords match.</li></ul></li>
-<li>Username: <input type="text" name="username" value="adrian" maxlength="10" /></li>
-<li>Password1: <input type="password" name="password1" value="foo" /></li>
-<li>Password2: <input type="password" name="password2" value="bar" /></li>
->>> f = UserRegistration({'username': 'adrian', 'password1': 'foo', 'password2': 'foo'}, auto_id=False)
->>> f.errors
-{}
->>> f.clean_data
-{'username': u'adrian', 'password1': u'foo', 'password2': u'foo'}
-
-# Dynamic construction ########################################################
-
-It's possible to construct a Form dynamically by adding to the self.fields
-dictionary in __init__(). Don't forget to call Form.__init__() within the
-subclass' __init__().
->>> class Person(Form):
-...     first_name = CharField()
-...     last_name = CharField()
-...     def __init__(self, *args, **kwargs):
-...         super(Person, self).__init__(*args, **kwargs)
-...         self.fields['birthday'] = DateField()
->>> p = Person(auto_id=False)
->>> print p
-<tr><th>First name:</th><td><input type="text" name="first_name" /></td></tr>
-<tr><th>Last name:</th><td><input type="text" name="last_name" /></td></tr>
-<tr><th>Birthday:</th><td><input type="text" name="birthday" /></td></tr>
-
-Instances of a dynamic Form do not persist fields from one Form instance to
-the next.
->>> class MyForm(Form):
-...     def __init__(self, data=None, auto_id=False, field_list=[]):
-...         Form.__init__(self, data, auto_id)
-...         for field in field_list:
-...             self.fields[field[0]] = field[1]
->>> field_list = [('field1', CharField()), ('field2', CharField())]
->>> my_form = MyForm(field_list=field_list)
->>> print my_form
-<tr><th>Field1:</th><td><input type="text" name="field1" /></td></tr>
-<tr><th>Field2:</th><td><input type="text" name="field2" /></td></tr>
->>> field_list = [('field3', CharField()), ('field4', CharField())]
->>> my_form = MyForm(field_list=field_list)
->>> print my_form
-<tr><th>Field3:</th><td><input type="text" name="field3" /></td></tr>
-<tr><th>Field4:</th><td><input type="text" name="field4" /></td></tr>
-
->>> class MyForm(Form):
-...     default_field_1 = CharField()
-...     default_field_2 = CharField()
-...     def __init__(self, data=None, auto_id=False, field_list=[]):
-...         Form.__init__(self, data, auto_id)
-...         for field in field_list:
-...             self.fields[field[0]] = field[1]
->>> field_list = [('field1', CharField()), ('field2', CharField())]
->>> my_form = MyForm(field_list=field_list)
->>> print my_form
-<tr><th>Default field 1:</th><td><input type="text" name="default_field_1" /></td></tr>
-<tr><th>Default field 2:</th><td><input type="text" name="default_field_2" /></td></tr>
-<tr><th>Field1:</th><td><input type="text" name="field1" /></td></tr>
-<tr><th>Field2:</th><td><input type="text" name="field2" /></td></tr>
->>> field_list = [('field3', CharField()), ('field4', CharField())]
->>> my_form = MyForm(field_list=field_list)
->>> print my_form
-<tr><th>Default field 1:</th><td><input type="text" name="default_field_1" /></td></tr>
-<tr><th>Default field 2:</th><td><input type="text" name="default_field_2" /></td></tr>
-<tr><th>Field3:</th><td><input type="text" name="field3" /></td></tr>
-<tr><th>Field4:</th><td><input type="text" name="field4" /></td></tr>
-
-Similarly, changes to field attributes do not persist from one Form instance
-to the next.
->>> class Person(Form):
-...     first_name = CharField(required=False)
-...     last_name = CharField(required=False)
-...     def __init__(self, names_required=False, *args, **kwargs):
-...         super(Person, self).__init__(*args, **kwargs)
-...         if names_required:
-...             self.fields['first_name'].required = True
-...             self.fields['last_name'].required = True
->>> f = Person(names_required=False)
->>> f['first_name'].field.required, f['last_name'].field.required
-(False, False)
->>> f = Person(names_required=True)
->>> f['first_name'].field.required, f['last_name'].field.required
-(True, True)
->>> f = Person(names_required=False)
->>> f['first_name'].field.required, f['last_name'].field.required
-(False, False)
->>> class Person(Form):
-...     first_name = CharField(max_length=30)
-...     last_name = CharField(max_length=30)
-...     def __init__(self, name_max_length=None, *args, **kwargs):
-...         super(Person, self).__init__(*args, **kwargs)
-...         if name_max_length:
-...             self.fields['first_name'].max_length = name_max_length
-...             self.fields['last_name'].max_length = name_max_length
->>> f = Person(name_max_length=None)
->>> f['first_name'].field.max_length, f['last_name'].field.max_length
-(30, 30)
->>> f = Person(name_max_length=20)
->>> f['first_name'].field.max_length, f['last_name'].field.max_length
-(20, 20)
->>> f = Person(name_max_length=None)
->>> f['first_name'].field.max_length, f['last_name'].field.max_length
-(30, 30)
-
-HiddenInput widgets are displayed differently in the as_table(), as_ul()
-and as_p() output of a Form -- their verbose names are not displayed, and a
-separate row is not displayed. They're displayed in the last row of the
-form, directly after that row's form element.
->>> class Person(Form):
-...     first_name = CharField()
-...     last_name = CharField()
-...     hidden_text = CharField(widget=HiddenInput)
-...     birthday = DateField()
->>> p = Person(auto_id=False)
->>> print p
-<tr><th>First name:</th><td><input type="text" name="first_name" /></td></tr>
-<tr><th>Last name:</th><td><input type="text" name="last_name" /></td></tr>
-<tr><th>Birthday:</th><td><input type="text" name="birthday" /><input type="hidden" name="hidden_text" /></td></tr>
->>> print p.as_ul()
-<li>First name: <input type="text" name="first_name" /></li>
-<li>Last name: <input type="text" name="last_name" /></li>
-<li>Birthday: <input type="text" name="birthday" /><input type="hidden" name="hidden_text" /></li>
->>> print p.as_p()
-<p>First name: <input type="text" name="first_name" /></p>
-<p>Last name: <input type="text" name="last_name" /></p>
-<p>Birthday: <input type="text" name="birthday" /><input type="hidden" name="hidden_text" /></p>
-
-With auto_id set, a HiddenInput still gets an ID, but it doesn't get a label.
->>> p = Person(auto_id='id_%s')
->>> print p
-<tr><th><label for="id_first_name">First name:</label></th><td><input type="text" name="first_name" id="id_first_name" /></td></tr>
-<tr><th><label for="id_last_name">Last name:</label></th><td><input type="text" name="last_name" id="id_last_name" /></td></tr>
-<tr><th><label for="id_birthday">Birthday:</label></th><td><input type="text" name="birthday" id="id_birthday" /><input type="hidden" name="hidden_text" id="id_hidden_text" /></td></tr>
->>> print p.as_ul()
-<li><label for="id_first_name">First name:</label> <input type="text" name="first_name" id="id_first_name" /></li>
-<li><label for="id_last_name">Last name:</label> <input type="text" name="last_name" id="id_last_name" /></li>
-<li><label for="id_birthday">Birthday:</label> <input type="text" name="birthday" id="id_birthday" /><input type="hidden" name="hidden_text" id="id_hidden_text" /></li>
->>> print p.as_p()
-<p><label for="id_first_name">First name:</label> <input type="text" name="first_name" id="id_first_name" /></p>
-<p><label for="id_last_name">Last name:</label> <input type="text" name="last_name" id="id_last_name" /></p>
-<p><label for="id_birthday">Birthday:</label> <input type="text" name="birthday" id="id_birthday" /><input type="hidden" name="hidden_text" id="id_hidden_text" /></p>
-
-If a field with a HiddenInput has errors, the as_table() and as_ul() output
-will include the error message(s) with the text "(Hidden field [fieldname]) "
-prepended. This message is displayed at the top of the output, regardless of
-its field's order in the form.
->>> p = Person({'first_name': 'John', 'last_name': 'Lennon', 'birthday': '1940-10-9'}, auto_id=False)
->>> print p
-<tr><td colspan="2"><ul class="errorlist"><li>(Hidden field hidden_text) This field is required.</li></ul></td></tr>
-<tr><th>First name:</th><td><input type="text" name="first_name" value="John" /></td></tr>
-<tr><th>Last name:</th><td><input type="text" name="last_name" value="Lennon" /></td></tr>
-<tr><th>Birthday:</th><td><input type="text" name="birthday" value="1940-10-9" /><input type="hidden" name="hidden_text" /></td></tr>
->>> print p.as_ul()
-<li><ul class="errorlist"><li>(Hidden field hidden_text) This field is required.</li></ul></li>
-<li>First name: <input type="text" name="first_name" value="John" /></li>
-<li>Last name: <input type="text" name="last_name" value="Lennon" /></li>
-<li>Birthday: <input type="text" name="birthday" value="1940-10-9" /><input type="hidden" name="hidden_text" /></li>
->>> print p.as_p()
-<p><ul class="errorlist"><li>(Hidden field hidden_text) This field is required.</li></ul></p>
-<p>First name: <input type="text" name="first_name" value="John" /></p>
-<p>Last name: <input type="text" name="last_name" value="Lennon" /></p>
-<p>Birthday: <input type="text" name="birthday" value="1940-10-9" /><input type="hidden" name="hidden_text" /></p>
-
-A corner case: It's possible for a form to have only HiddenInputs.
->>> class TestForm(Form):
-...     foo = CharField(widget=HiddenInput)
-...     bar = CharField(widget=HiddenInput)
->>> p = TestForm(auto_id=False)
->>> print p.as_table()
-<input type="hidden" name="foo" /><input type="hidden" name="bar" />
->>> print p.as_ul()
-<input type="hidden" name="foo" /><input type="hidden" name="bar" />
->>> print p.as_p()
-<input type="hidden" name="foo" /><input type="hidden" name="bar" />
-
-A Form's fields are displayed in the same order in which they were defined.
->>> class TestForm(Form):
-...     field1 = CharField()
-...     field2 = CharField()
-...     field3 = CharField()
-...     field4 = CharField()
-...     field5 = CharField()
-...     field6 = CharField()
-...     field7 = CharField()
-...     field8 = CharField()
-...     field9 = CharField()
-...     field10 = CharField()
-...     field11 = CharField()
-...     field12 = CharField()
-...     field13 = CharField()
-...     field14 = CharField()
->>> p = TestForm(auto_id=False)
->>> print p
-<tr><th>Field1:</th><td><input type="text" name="field1" /></td></tr>
-<tr><th>Field2:</th><td><input type="text" name="field2" /></td></tr>
-<tr><th>Field3:</th><td><input type="text" name="field3" /></td></tr>
-<tr><th>Field4:</th><td><input type="text" name="field4" /></td></tr>
-<tr><th>Field5:</th><td><input type="text" name="field5" /></td></tr>
-<tr><th>Field6:</th><td><input type="text" name="field6" /></td></tr>
-<tr><th>Field7:</th><td><input type="text" name="field7" /></td></tr>
-<tr><th>Field8:</th><td><input type="text" name="field8" /></td></tr>
-<tr><th>Field9:</th><td><input type="text" name="field9" /></td></tr>
-<tr><th>Field10:</th><td><input type="text" name="field10" /></td></tr>
-<tr><th>Field11:</th><td><input type="text" name="field11" /></td></tr>
-<tr><th>Field12:</th><td><input type="text" name="field12" /></td></tr>
-<tr><th>Field13:</th><td><input type="text" name="field13" /></td></tr>
-<tr><th>Field14:</th><td><input type="text" name="field14" /></td></tr>
-
-Some Field classes have an effect on the HTML attributes of their associated
-Widget. If you set max_length in a CharField and its associated widget is
-either a TextInput or PasswordInput, then the widget's rendered HTML will
-include the "maxlength" attribute.
->>> class UserRegistration(Form):
-...    username = CharField(max_length=10)                   # uses TextInput by default
-...    password = CharField(max_length=10, widget=PasswordInput)
-...    realname = CharField(max_length=10, widget=TextInput) # redundantly define widget, just to test
-...    address = CharField()                                 # no max_length defined here
->>> p = UserRegistration(auto_id=False)
->>> print p.as_ul()
-<li>Username: <input type="text" name="username" maxlength="10" /></li>
-<li>Password: <input type="password" name="password" maxlength="10" /></li>
-<li>Realname: <input type="text" name="realname" maxlength="10" /></li>
-<li>Address: <input type="text" name="address" /></li>
-
-If you specify a custom "attrs" that includes the "maxlength" attribute,
-the Field's max_length attribute will override whatever "maxlength" you specify
-in "attrs".
->>> class UserRegistration(Form):
-...    username = CharField(max_length=10, widget=TextInput(attrs={'maxlength': 20}))
-...    password = CharField(max_length=10, widget=PasswordInput)
->>> p = UserRegistration(auto_id=False)
->>> print p.as_ul()
-<li>Username: <input type="text" name="username" maxlength="10" /></li>
-<li>Password: <input type="password" name="password" maxlength="10" /></li>
-
-# Specifying labels ###########################################################
-
-You can specify the label for a field by using the 'label' argument to a Field
-class. If you don't specify 'label', Django will use the field name with
-underscores converted to spaces, and the initial letter capitalized.
->>> class UserRegistration(Form):
-...    username = CharField(max_length=10, label='Your username')
-...    password1 = CharField(widget=PasswordInput)
-...    password2 = CharField(widget=PasswordInput, label='Password (again)')
->>> p = UserRegistration(auto_id=False)
->>> print p.as_ul()
-<li>Your username: <input type="text" name="username" maxlength="10" /></li>
-<li>Password1: <input type="password" name="password1" /></li>
-<li>Password (again): <input type="password" name="password2" /></li>
-
-A label can be a Unicode object or a bytestring with special characters.
->>> class UserRegistration(Form):
-...    username = CharField(max_length=10, label='ŠĐĆŽćžšđ')
-...    password = CharField(widget=PasswordInput, label=u'\u0160\u0110\u0106\u017d\u0107\u017e\u0161\u0111')
->>> p = UserRegistration(auto_id=False)
->>> p.as_ul()
-u'<li>\u0160\u0110\u0106\u017d\u0107\u017e\u0161\u0111: <input type="text" name="username" maxlength="10" /></li>\n<li>\u0160\u0110\u0106\u017d\u0107\u017e\u0161\u0111: <input type="password" name="password" /></li>'
-
-If a label is set to the empty string for a field, that field won't get a label.
->>> class UserRegistration(Form):
-...    username = CharField(max_length=10, label='')
-...    password = CharField(widget=PasswordInput)
->>> p = UserRegistration(auto_id=False)
->>> print p.as_ul()
-<li> <input type="text" name="username" maxlength="10" /></li>
-<li>Password: <input type="password" name="password" /></li>
->>> p = UserRegistration(auto_id='id_%s')
->>> print p.as_ul()
-<li> <input id="id_username" type="text" name="username" maxlength="10" /></li>
-<li><label for="id_password">Password:</label> <input type="password" name="password" id="id_password" /></li>
-
-If label is None, Django will auto-create the label from the field name. This
-is default behavior.
->>> class UserRegistration(Form):
-...    username = CharField(max_length=10, label=None)
-...    password = CharField(widget=PasswordInput)
->>> p = UserRegistration(auto_id=False)
->>> print p.as_ul()
-<li>Username: <input type="text" name="username" maxlength="10" /></li>
-<li>Password: <input type="password" name="password" /></li>
->>> p = UserRegistration(auto_id='id_%s')
->>> print p.as_ul()
-<li><label for="id_username">Username:</label> <input id="id_username" type="text" name="username" maxlength="10" /></li>
-<li><label for="id_password">Password:</label> <input type="password" name="password" id="id_password" /></li>
-
-# Initial data ################################################################
-
-You can specify initial data for a field by using the 'initial' argument to a
-Field class. This initial data is displayed when a Form is rendered with *no*
-data. It is not displayed when a Form is rendered with any data (including an
-empty dictionary). Also, the initial value is *not* used if data for a
-particular required field isn't provided.
->>> class UserRegistration(Form):
-...    username = CharField(max_length=10, initial='django')
-...    password = CharField(widget=PasswordInput)
-
-Here, we're not submitting any data, so the initial value will be displayed.
->>> p = UserRegistration(auto_id=False)
->>> print p.as_ul()
-<li>Username: <input type="text" name="username" value="django" maxlength="10" /></li>
-<li>Password: <input type="password" name="password" /></li>
-
-Here, we're submitting data, so the initial value will *not* be displayed.
->>> p = UserRegistration({}, auto_id=False)
->>> print p.as_ul()
-<li><ul class="errorlist"><li>This field is required.</li></ul>Username: <input type="text" name="username" maxlength="10" /></li>
-<li><ul class="errorlist"><li>This field is required.</li></ul>Password: <input type="password" name="password" /></li>
->>> p = UserRegistration({'username': u''}, auto_id=False)
->>> print p.as_ul()
-<li><ul class="errorlist"><li>This field is required.</li></ul>Username: <input type="text" name="username" maxlength="10" /></li>
-<li><ul class="errorlist"><li>This field is required.</li></ul>Password: <input type="password" name="password" /></li>
->>> p = UserRegistration({'username': u'foo'}, auto_id=False)
->>> print p.as_ul()
-<li>Username: <input type="text" name="username" value="foo" maxlength="10" /></li>
-<li><ul class="errorlist"><li>This field is required.</li></ul>Password: <input type="password" name="password" /></li>
-
-An 'initial' value is *not* used as a fallback if data is not provided. In this
-example, we don't provide a value for 'username', and the form raises a
-validation error rather than using the initial value for 'username'.
->>> p = UserRegistration({'password': 'secret'})
->>> p.errors
-{'username': [u'This field is required.']}
->>> p.is_valid()
-False
-
-# Dynamic initial data ########################################################
-
-The previous technique dealt with "hard-coded" initial data, but it's also
-possible to specify initial data after you've already created the Form class
-(i.e., at runtime). Use the 'initial' parameter to the Form constructor. This
-should be a dictionary containing initial values for one or more fields in the
-form, keyed by field name.
-
->>> class UserRegistration(Form):
-...    username = CharField(max_length=10)
-...    password = CharField(widget=PasswordInput)
-
-Here, we're not submitting any data, so the initial value will be displayed.
->>> p = UserRegistration(initial={'username': 'django'}, auto_id=False)
->>> print p.as_ul()
-<li>Username: <input type="text" name="username" value="django" maxlength="10" /></li>
-<li>Password: <input type="password" name="password" /></li>
->>> p = UserRegistration(initial={'username': 'stephane'}, auto_id=False)
->>> print p.as_ul()
-<li>Username: <input type="text" name="username" value="stephane" maxlength="10" /></li>
-<li>Password: <input type="password" name="password" /></li>
-
-The 'initial' parameter is meaningless if you pass data.
->>> p = UserRegistration({}, initial={'username': 'django'}, auto_id=False)
->>> print p.as_ul()
-<li><ul class="errorlist"><li>This field is required.</li></ul>Username: <input type="text" name="username" maxlength="10" /></li>
-<li><ul class="errorlist"><li>This field is required.</li></ul>Password: <input type="password" name="password" /></li>
->>> p = UserRegistration({'username': u''}, initial={'username': 'django'}, auto_id=False)
->>> print p.as_ul()
-<li><ul class="errorlist"><li>This field is required.</li></ul>Username: <input type="text" name="username" maxlength="10" /></li>
-<li><ul class="errorlist"><li>This field is required.</li></ul>Password: <input type="password" name="password" /></li>
->>> p = UserRegistration({'username': u'foo'}, initial={'username': 'django'}, auto_id=False)
->>> print p.as_ul()
-<li>Username: <input type="text" name="username" value="foo" maxlength="10" /></li>
-<li><ul class="errorlist"><li>This field is required.</li></ul>Password: <input type="password" name="password" /></li>
-
-A dynamic 'initial' value is *not* used as a fallback if data is not provided.
-In this example, we don't provide a value for 'username', and the form raises a
-validation error rather than using the initial value for 'username'.
->>> p = UserRegistration({'password': 'secret'}, initial={'username': 'django'})
->>> p.errors
-{'username': [u'This field is required.']}
->>> p.is_valid()
-False
-
-If a Form defines 'initial' *and* 'initial' is passed as a parameter to Form(),
-then the latter will get precedence.
->>> class UserRegistration(Form):
-...    username = CharField(max_length=10, initial='django')
-...    password = CharField(widget=PasswordInput)
->>> p = UserRegistration(initial={'username': 'babik'}, auto_id=False)
->>> print p.as_ul()
-<li>Username: <input type="text" name="username" value="babik" maxlength="10" /></li>
-<li>Password: <input type="password" name="password" /></li>
-
-# Help text ###################################################################
-
-You can specify descriptive text for a field by using the 'help_text' argument
-to a Field class. This help text is displayed when a Form is rendered.
->>> class UserRegistration(Form):
-...    username = CharField(max_length=10, help_text='e.g., user@example.com')
-...    password = CharField(widget=PasswordInput, help_text='Choose wisely.')
->>> p = UserRegistration(auto_id=False)
->>> print p.as_ul()
-<li>Username: <input type="text" name="username" maxlength="10" /> e.g., user@example.com</li>
-<li>Password: <input type="password" name="password" /> Choose wisely.</li>
->>> print p.as_p()
-<p>Username: <input type="text" name="username" maxlength="10" /> e.g., user@example.com</p>
-<p>Password: <input type="password" name="password" /> Choose wisely.</p>
->>> print p.as_table()
-<tr><th>Username:</th><td><input type="text" name="username" maxlength="10" /><br />e.g., user@example.com</td></tr>
-<tr><th>Password:</th><td><input type="password" name="password" /><br />Choose wisely.</td></tr>
-
-The help text is displayed whether or not data is provided for the form.
->>> p = UserRegistration({'username': u'foo'}, auto_id=False)
->>> print p.as_ul()
-<li>Username: <input type="text" name="username" value="foo" maxlength="10" /> e.g., user@example.com</li>
-<li><ul class="errorlist"><li>This field is required.</li></ul>Password: <input type="password" name="password" /> Choose wisely.</li>
-
-help_text is not displayed for hidden fields. It can be used for documentation
-purposes, though.
->>> class UserRegistration(Form):
-...    username = CharField(max_length=10, help_text='e.g., user@example.com')
-...    password = CharField(widget=PasswordInput)
-...    next = CharField(widget=HiddenInput, initial='/', help_text='Redirect destination')
->>> p = UserRegistration(auto_id=False)
->>> print p.as_ul()
-<li>Username: <input type="text" name="username" maxlength="10" /> e.g., user@example.com</li>
-<li>Password: <input type="password" name="password" /><input type="hidden" name="next" value="/" /></li>
-
-Help text can include arbitrary Unicode characters.
->>> class UserRegistration(Form):
-...    username = CharField(max_length=10, help_text='ŠĐĆŽćžšđ')
->>> p = UserRegistration(auto_id=False)
->>> p.as_ul()
-u'<li>Username: <input type="text" name="username" maxlength="10" /> \u0160\u0110\u0106\u017d\u0107\u017e\u0161\u0111</li>'
-
-# Subclassing forms ###########################################################
-
-You can subclass a Form to add fields. The resulting form subclass will have
-all of the fields of the parent Form, plus whichever fields you define in the
-subclass.
->>> class Person(Form):
-...     first_name = CharField()
-...     last_name = CharField()
-...     birthday = DateField()
->>> class Musician(Person):
-...     instrument = CharField()
->>> p = Person(auto_id=False)
->>> print p.as_ul()
-<li>First name: <input type="text" name="first_name" /></li>
-<li>Last name: <input type="text" name="last_name" /></li>
-<li>Birthday: <input type="text" name="birthday" /></li>
->>> m = Musician(auto_id=False)
->>> print m.as_ul()
-<li>First name: <input type="text" name="first_name" /></li>
-<li>Last name: <input type="text" name="last_name" /></li>
-<li>Birthday: <input type="text" name="birthday" /></li>
-<li>Instrument: <input type="text" name="instrument" /></li>
-
-Yes, you can subclass multiple forms. The fields are added in the order in
-which the parent classes are listed.
->>> class Person(Form):
-...     first_name = CharField()
-...     last_name = CharField()
-...     birthday = DateField()
->>> class Instrument(Form):
-...     instrument = CharField()
->>> class Beatle(Person, Instrument):
-...     haircut_type = CharField()
->>> b = Beatle(auto_id=False)
->>> print b.as_ul()
-<li>First name: <input type="text" name="first_name" /></li>
-<li>Last name: <input type="text" name="last_name" /></li>
-<li>Birthday: <input type="text" name="birthday" /></li>
-<li>Instrument: <input type="text" name="instrument" /></li>
-<li>Haircut type: <input type="text" name="haircut_type" /></li>
-
-# Forms with prefixes #########################################################
-
-Sometimes it's necessary to have multiple forms display on the same HTML page,
-or multiple copies of the same form. We can accomplish this with form prefixes.
-Pass the keyword argument 'prefix' to the Form constructor to use this feature.
-This value will be prepended to each HTML form field name. One way to think
-about this is "namespaces for HTML forms". Notice that in the data argument,
-each field's key has the prefix, in this case 'person1', prepended to the
-actual field name.
->>> class Person(Form):
-...     first_name = CharField()
-...     last_name = CharField()
-...     birthday = DateField()
->>> data = {
-...     'person1-first_name': u'John',
-...     'person1-last_name': u'Lennon',
-...     'person1-birthday': u'1940-10-9'
-... }
->>> p = Person(data, prefix='person1')
->>> print p.as_ul()
-<li><label for="id_person1-first_name">First name:</label> <input type="text" name="person1-first_name" value="John" id="id_person1-first_name" /></li>
-<li><label for="id_person1-last_name">Last name:</label> <input type="text" name="person1-last_name" value="Lennon" id="id_person1-last_name" /></li>
-<li><label for="id_person1-birthday">Birthday:</label> <input type="text" name="person1-birthday" value="1940-10-9" id="id_person1-birthday" /></li>
->>> print p['first_name']
-<input type="text" name="person1-first_name" value="John" id="id_person1-first_name" />
->>> print p['last_name']
-<input type="text" name="person1-last_name" value="Lennon" id="id_person1-last_name" />
->>> print p['birthday']
-<input type="text" name="person1-birthday" value="1940-10-9" id="id_person1-birthday" />
->>> p.errors
-{}
->>> p.is_valid()
-True
->>> p.clean_data
-{'first_name': u'John', 'last_name': u'Lennon', 'birthday': datetime.date(1940, 10, 9)}
-
-Let's try submitting some bad data to make sure form.errors and field.errors
-work as expected.
->>> data = {
-...     'person1-first_name': u'',
-...     'person1-last_name': u'',
-...     'person1-birthday': u''
-... }
->>> p = Person(data, prefix='person1')
->>> p.errors
-{'first_name': [u'This field is required.'], 'last_name': [u'This field is required.'], 'birthday': [u'This field is required.']}
->>> p['first_name'].errors
-[u'This field is required.']
->>> p['person1-first_name'].errors
-Traceback (most recent call last):
-...
-KeyError: "Key 'person1-first_name' not found in Form"
-
-In this example, the data doesn't have a prefix, but the form requires it, so
-the form doesn't "see" the fields.
->>> data = {
-...     'first_name': u'John',
-...     'last_name': u'Lennon',
-...     'birthday': u'1940-10-9'
-... }
->>> p = Person(data, prefix='person1')
->>> p.errors
-{'first_name': [u'This field is required.'], 'last_name': [u'This field is required.'], 'birthday': [u'This field is required.']}
-
-With prefixes, a single data dictionary can hold data for multiple instances
-of the same form.
->>> data = {
-...     'person1-first_name': u'John',
-...     'person1-last_name': u'Lennon',
-...     'person1-birthday': u'1940-10-9',
-...     'person2-first_name': u'Jim',
-...     'person2-last_name': u'Morrison',
-...     'person2-birthday': u'1943-12-8'
-... }
->>> p1 = Person(data, prefix='person1')
->>> p1.is_valid()
-True
->>> p1.clean_data
-{'first_name': u'John', 'last_name': u'Lennon', 'birthday': datetime.date(1940, 10, 9)}
->>> p2 = Person(data, prefix='person2')
->>> p2.is_valid()
-True
->>> p2.clean_data
-{'first_name': u'Jim', 'last_name': u'Morrison', 'birthday': datetime.date(1943, 12, 8)}
-
-By default, forms append a hyphen between the prefix and the field name, but a
-form can alter that behavior by implementing the add_prefix() method. This
-method takes a field name and returns the prefixed field, according to
-self.prefix.
->>> class Person(Form):
-...     first_name = CharField()
-...     last_name = CharField()
-...     birthday = DateField()
-...     def add_prefix(self, field_name):
-...         return self.prefix and '%s-prefix-%s' % (self.prefix, field_name) or field_name
->>> p = Person(prefix='foo')
->>> print p.as_ul()
-<li><label for="id_foo-prefix-first_name">First name:</label> <input type="text" name="foo-prefix-first_name" id="id_foo-prefix-first_name" /></li>
-<li><label for="id_foo-prefix-last_name">Last name:</label> <input type="text" name="foo-prefix-last_name" id="id_foo-prefix-last_name" /></li>
-<li><label for="id_foo-prefix-birthday">Birthday:</label> <input type="text" name="foo-prefix-birthday" id="id_foo-prefix-birthday" /></li>
->>> data = {
-...     'foo-prefix-first_name': u'John',
-...     'foo-prefix-last_name': u'Lennon',
-...     'foo-prefix-birthday': u'1940-10-9'
-... }
->>> p = Person(data, prefix='foo')
->>> p.is_valid()
-True
->>> p.clean_data
-{'first_name': u'John', 'last_name': u'Lennon', 'birthday': datetime.date(1940, 10, 9)}
-
-# Forms with NullBooleanFields ################################################
-
-NullBooleanField is a bit of a special case because its presentation (widget)
-is different than its data. This is handled transparently, though.
-
->>> class Person(Form):
-...     name = CharField()
-...     is_cool = NullBooleanField()
->>> p = Person({'name': u'Joe'}, auto_id=False)
->>> print p['is_cool']
-<select name="is_cool">
-<option value="1" selected="selected">Unknown</option>
-<option value="2">Yes</option>
-<option value="3">No</option>
-</select>
->>> p = Person({'name': u'Joe', 'is_cool': u'1'}, auto_id=False)
->>> print p['is_cool']
-<select name="is_cool">
-<option value="1" selected="selected">Unknown</option>
-<option value="2">Yes</option>
-<option value="3">No</option>
-</select>
->>> p = Person({'name': u'Joe', 'is_cool': u'2'}, auto_id=False)
->>> print p['is_cool']
-<select name="is_cool">
-<option value="1">Unknown</option>
-<option value="2" selected="selected">Yes</option>
-<option value="3">No</option>
-</select>
->>> p = Person({'name': u'Joe', 'is_cool': u'3'}, auto_id=False)
->>> print p['is_cool']
-<select name="is_cool">
-<option value="1">Unknown</option>
-<option value="2">Yes</option>
-<option value="3" selected="selected">No</option>
-</select>
->>> p = Person({'name': u'Joe', 'is_cool': True}, auto_id=False)
->>> print p['is_cool']
-<select name="is_cool">
-<option value="1">Unknown</option>
-<option value="2" selected="selected">Yes</option>
-<option value="3">No</option>
-</select>
->>> p = Person({'name': u'Joe', 'is_cool': False}, auto_id=False)
->>> print p['is_cool']
-<select name="is_cool">
-<option value="1">Unknown</option>
-<option value="2">Yes</option>
-<option value="3" selected="selected">No</option>
-</select>
-
-# Basic form processing in a view #############################################
-
->>> from django.template import Template, Context
->>> class UserRegistration(Form):
-...    username = CharField(max_length=10)
-...    password1 = CharField(widget=PasswordInput)
-...    password2 = CharField(widget=PasswordInput)
-...    def clean(self):
-...        if self.clean_data.get('password1') and self.clean_data.get('password2') and self.clean_data['password1'] != self.clean_data['password2']:
-...            raise ValidationError(u'Please make sure your passwords match.')
-...        return self.clean_data
->>> def my_function(method, post_data):
-...     if method == 'POST':
-...         form = UserRegistration(post_data, auto_id=False)
-...     else:
-...         form = UserRegistration(auto_id=False)
-...     if form.is_valid():
-...         return 'VALID: %r' % form.clean_data
-...     t = Template('<form action="" method="post">\n<table>\n{{ form }}\n</table>\n<input type="submit" />\n</form>')
-...     return t.render(Context({'form': form}))
-
-Case 1: GET (an empty form, with no errors).
->>> print my_function('GET', {})
-<form action="" method="post">
-<table>
-<tr><th>Username:</th><td><input type="text" name="username" maxlength="10" /></td></tr>
-<tr><th>Password1:</th><td><input type="password" name="password1" /></td></tr>
-<tr><th>Password2:</th><td><input type="password" name="password2" /></td></tr>
-</table>
-<input type="submit" />
-</form>
-
-Case 2: POST with erroneous data (a redisplayed form, with errors).
->>> print my_function('POST', {'username': 'this-is-a-long-username', 'password1': 'foo', 'password2': 'bar'})
-<form action="" method="post">
-<table>
-<tr><td colspan="2"><ul class="errorlist"><li>Please make sure your passwords match.</li></ul></td></tr>
-<tr><th>Username:</th><td><ul class="errorlist"><li>Ensure this value has at most 10 characters.</li></ul><input type="text" name="username" value="this-is-a-long-username" maxlength="10" /></td></tr>
-<tr><th>Password1:</th><td><input type="password" name="password1" value="foo" /></td></tr>
-<tr><th>Password2:</th><td><input type="password" name="password2" value="bar" /></td></tr>
-</table>
-<input type="submit" />
-</form>
-
-Case 3: POST with valid data (the success message).
->>> print my_function('POST', {'username': 'adrian', 'password1': 'secret', 'password2': 'secret'})
-VALID: {'username': u'adrian', 'password1': u'secret', 'password2': u'secret'}
-
-# Some ideas for using templates with forms ###################################
-
->>> class UserRegistration(Form):
-...    username = CharField(max_length=10, help_text="Good luck picking a username that doesn't already exist.")
-...    password1 = CharField(widget=PasswordInput)
-...    password2 = CharField(widget=PasswordInput)
-...    def clean(self):
-...        if self.clean_data.get('password1') and self.clean_data.get('password2') and self.clean_data['password1'] != self.clean_data['password2']:
-...            raise ValidationError(u'Please make sure your passwords match.')
-...        return self.clean_data
-
-You have full flexibility in displaying form fields in a template. Just pass a
-Form instance to the template, and use "dot" access to refer to individual
-fields. Note, however, that this flexibility comes with the responsibility of
-displaying all the errors, including any that might not be associated with a
-particular field.
->>> t = Template('''<form action="">
-... {{ form.username.errors.as_ul }}<p><label>Your username: {{ form.username }}</label></p>
-... {{ form.password1.errors.as_ul }}<p><label>Password: {{ form.password1 }}</label></p>
-... {{ form.password2.errors.as_ul }}<p><label>Password (again): {{ form.password2 }}</label></p>
-... <input type="submit" />
-... </form>''')
->>> print t.render(Context({'form': UserRegistration(auto_id=False)}))
-<form action="">
-<p><label>Your username: <input type="text" name="username" maxlength="10" /></label></p>
-<p><label>Password: <input type="password" name="password1" /></label></p>
-<p><label>Password (again): <input type="password" name="password2" /></label></p>
-<input type="submit" />
-</form>
->>> print t.render(Context({'form': UserRegistration({'username': 'django'}, auto_id=False)}))
-<form action="">
-<p><label>Your username: <input type="text" name="username" value="django" maxlength="10" /></label></p>
-<ul class="errorlist"><li>This field is required.</li></ul><p><label>Password: <input type="password" name="password1" /></label></p>
-<ul class="errorlist"><li>This field is required.</li></ul><p><label>Password (again): <input type="password" name="password2" /></label></p>
-<input type="submit" />
-</form>
-
-Use form.[field].label to output a field's label. You can specify the label for
-a field by using the 'label' argument to a Field class. If you don't specify
-'label', Django will use the field name with underscores converted to spaces,
-and the initial letter capitalized.
->>> t = Template('''<form action="">
-... <p><label>{{ form.username.label }}: {{ form.username }}</label></p>
-... <p><label>{{ form.password1.label }}: {{ form.password1 }}</label></p>
-... <p><label>{{ form.password2.label }}: {{ form.password2 }}</label></p>
-... <input type="submit" />
-... </form>''')
->>> print t.render(Context({'form': UserRegistration(auto_id=False)}))
-<form action="">
-<p><label>Username: <input type="text" name="username" maxlength="10" /></label></p>
-<p><label>Password1: <input type="password" name="password1" /></label></p>
-<p><label>Password2: <input type="password" name="password2" /></label></p>
-<input type="submit" />
-</form>
-
-User form.[field].label_tag to output a field's label with a <label> tag
-wrapped around it, but *only* if the given field has an "id" attribute.
-Recall from above that passing the "auto_id" argument to a Form gives each
-field an "id" attribute.
->>> t = Template('''<form action="">
-... <p>{{ form.username.label_tag }}: {{ form.username }}</p>
-... <p>{{ form.password1.label_tag }}: {{ form.password1 }}</p>
-... <p>{{ form.password2.label_tag }}: {{ form.password2 }}</p>
-... <input type="submit" />
-... </form>''')
->>> print t.render(Context({'form': UserRegistration(auto_id=False)}))
-<form action="">
-<p>Username: <input type="text" name="username" maxlength="10" /></p>
-<p>Password1: <input type="password" name="password1" /></p>
-<p>Password2: <input type="password" name="password2" /></p>
-<input type="submit" />
-</form>
->>> print t.render(Context({'form': UserRegistration(auto_id='id_%s')}))
-<form action="">
-<p><label for="id_username">Username</label>: <input id="id_username" type="text" name="username" maxlength="10" /></p>
-<p><label for="id_password1">Password1</label>: <input type="password" name="password1" id="id_password1" /></p>
-<p><label for="id_password2">Password2</label>: <input type="password" name="password2" id="id_password2" /></p>
-<input type="submit" />
-</form>
-
-User form.[field].help_text to output a field's help text. If the given field
-does not have help text, nothing will be output.
->>> t = Template('''<form action="">
-... <p>{{ form.username.label_tag }}: {{ form.username }}<br />{{ form.username.help_text }}</p>
-... <p>{{ form.password1.label_tag }}: {{ form.password1 }}</p>
-... <p>{{ form.password2.label_tag }}: {{ form.password2 }}</p>
-... <input type="submit" />
-... </form>''')
->>> print t.render(Context({'form': UserRegistration(auto_id=False)}))
-<form action="">
-<p>Username: <input type="text" name="username" maxlength="10" /><br />Good luck picking a username that doesn't already exist.</p>
-<p>Password1: <input type="password" name="password1" /></p>
-<p>Password2: <input type="password" name="password2" /></p>
-<input type="submit" />
-</form>
->>> Template('{{ form.password1.help_text }}').render(Context({'form': UserRegistration(auto_id=False)}))
-''
-
-The label_tag() method takes an optional attrs argument: a dictionary of HTML
-attributes to add to the <label> tag.
->>> f = UserRegistration(auto_id='id_%s')
->>> for bf in f:
-...     print bf.label_tag(attrs={'class': 'pretty'})
-<label for="id_username" class="pretty">Username</label>
-<label for="id_password1" class="pretty">Password1</label>
-<label for="id_password2" class="pretty">Password2</label>
-
-To display the errors that aren't associated with a particular field -- e.g.,
-the errors caused by Form.clean() -- use {{ form.non_field_errors }} in the
-template. If used on its own, it is displayed as a <ul> (or an empty string, if
-the list of errors is empty). You can also use it in {% if %} statements.
->>> t = Template('''<form action="">
-... {{ form.username.errors.as_ul }}<p><label>Your username: {{ form.username }}</label></p>
-... {{ form.password1.errors.as_ul }}<p><label>Password: {{ form.password1 }}</label></p>
-... {{ form.password2.errors.as_ul }}<p><label>Password (again): {{ form.password2 }}</label></p>
-... <input type="submit" />
-... </form>''')
->>> print t.render(Context({'form': UserRegistration({'username': 'django', 'password1': 'foo', 'password2': 'bar'}, auto_id=False)}))
-<form action="">
-<p><label>Your username: <input type="text" name="username" value="django" maxlength="10" /></label></p>
-<p><label>Password: <input type="password" name="password1" value="foo" /></label></p>
-<p><label>Password (again): <input type="password" name="password2" value="bar" /></label></p>
-<input type="submit" />
-</form>
->>> t = Template('''<form action="">
-... {{ form.non_field_errors }}
-... {{ form.username.errors.as_ul }}<p><label>Your username: {{ form.username }}</label></p>
-... {{ form.password1.errors.as_ul }}<p><label>Password: {{ form.password1 }}</label></p>
-... {{ form.password2.errors.as_ul }}<p><label>Password (again): {{ form.password2 }}</label></p>
-... <input type="submit" />
-... </form>''')
->>> print t.render(Context({'form': UserRegistration({'username': 'django', 'password1': 'foo', 'password2': 'bar'}, auto_id=False)}))
-<form action="">
-<ul class="errorlist"><li>Please make sure your passwords match.</li></ul>
-<p><label>Your username: <input type="text" name="username" value="django" maxlength="10" /></label></p>
-<p><label>Password: <input type="password" name="password1" value="foo" /></label></p>
-<p><label>Password (again): <input type="password" name="password2" value="bar" /></label></p>
-<input type="submit" />
-</form>
-
-###############
-# Extra stuff #
-###############
-
-The newforms library comes with some extra, higher-level Field and Widget
-classes that demonstrate some of the library's abilities.
-
-# SelectDateWidget ############################################################
-
->>> from django.newforms.extras import SelectDateWidget
->>> w = SelectDateWidget(years=('2007','2008','2009','2010','2011','2012','2013','2014','2015','2016'))
->>> print w.render('mydate', '')
-<select name="mydate_month">
-<option value="1">January</option>
-<option value="2">February</option>
-<option value="3">March</option>
-<option value="4">April</option>
-<option value="5">May</option>
-<option value="6">June</option>
-<option value="7">July</option>
-<option value="8">August</option>
-<option value="9">September</option>
-<option value="10">October</option>
-<option value="11">November</option>
-<option value="12">December</option>
-</select>
-<select name="mydate_day">
-<option value="1">1</option>
-<option value="2">2</option>
-<option value="3">3</option>
-<option value="4">4</option>
-<option value="5">5</option>
-<option value="6">6</option>
-<option value="7">7</option>
-<option value="8">8</option>
-<option value="9">9</option>
-<option value="10">10</option>
-<option value="11">11</option>
-<option value="12">12</option>
-<option value="13">13</option>
-<option value="14">14</option>
-<option value="15">15</option>
-<option value="16">16</option>
-<option value="17">17</option>
-<option value="18">18</option>
-<option value="19">19</option>
-<option value="20">20</option>
-<option value="21">21</option>
-<option value="22">22</option>
-<option value="23">23</option>
-<option value="24">24</option>
-<option value="25">25</option>
-<option value="26">26</option>
-<option value="27">27</option>
-<option value="28">28</option>
-<option value="29">29</option>
-<option value="30">30</option>
-<option value="31">31</option>
-</select>
-<select name="mydate_year">
-<option value="2007">2007</option>
-<option value="2008">2008</option>
-<option value="2009">2009</option>
-<option value="2010">2010</option>
-<option value="2011">2011</option>
-<option value="2012">2012</option>
-<option value="2013">2013</option>
-<option value="2014">2014</option>
-<option value="2015">2015</option>
-<option value="2016">2016</option>
-</select>
->>> w.render('mydate', None) == w.render('mydate', '')
-True
->>> print w.render('mydate', '2010-04-15')
-<select name="mydate_month">
-<option value="1">January</option>
-<option value="2">February</option>
-<option value="3">March</option>
-<option value="4" selected="selected">April</option>
-<option value="5">May</option>
-<option value="6">June</option>
-<option value="7">July</option>
-<option value="8">August</option>
-<option value="9">September</option>
-<option value="10">October</option>
-<option value="11">November</option>
-<option value="12">December</option>
-</select>
-<select name="mydate_day">
-<option value="1">1</option>
-<option value="2">2</option>
-<option value="3">3</option>
-<option value="4">4</option>
-<option value="5">5</option>
-<option value="6">6</option>
-<option value="7">7</option>
-<option value="8">8</option>
-<option value="9">9</option>
-<option value="10">10</option>
-<option value="11">11</option>
-<option value="12">12</option>
-<option value="13">13</option>
-<option value="14">14</option>
-<option value="15" selected="selected">15</option>
-<option value="16">16</option>
-<option value="17">17</option>
-<option value="18">18</option>
-<option value="19">19</option>
-<option value="20">20</option>
-<option value="21">21</option>
-<option value="22">22</option>
-<option value="23">23</option>
-<option value="24">24</option>
-<option value="25">25</option>
-<option value="26">26</option>
-<option value="27">27</option>
-<option value="28">28</option>
-<option value="29">29</option>
-<option value="30">30</option>
-<option value="31">31</option>
-</select>
-<select name="mydate_year">
-<option value="2007">2007</option>
-<option value="2008">2008</option>
-<option value="2009">2009</option>
-<option value="2010" selected="selected">2010</option>
-<option value="2011">2011</option>
-<option value="2012">2012</option>
-<option value="2013">2013</option>
-<option value="2014">2014</option>
-<option value="2015">2015</option>
-<option value="2016">2016</option>
-</select>
-
-# USZipCodeField ##############################################################
-
-USZipCodeField validates that the data is either a five-digit U.S. zip code or
-a zip+4.
->>> from django.contrib.localflavor.usa.forms import USZipCodeField
->>> f = USZipCodeField()
->>> f.clean('60606')
-u'60606'
->>> f.clean(60606)
-u'60606'
->>> f.clean('04000')
-u'04000'
->>> f.clean('4000')
-Traceback (most recent call last):
-...
-ValidationError: [u'Enter a zip code in the format XXXXX or XXXXX-XXXX.']
->>> f.clean('60606-1234')
-u'60606-1234'
->>> f.clean('6060-1234')
-Traceback (most recent call last):
-...
-ValidationError: [u'Enter a zip code in the format XXXXX or XXXXX-XXXX.']
->>> f.clean('60606-')
-Traceback (most recent call last):
-...
-ValidationError: [u'Enter a zip code in the format XXXXX or XXXXX-XXXX.']
->>> f.clean(None)
-Traceback (most recent call last):
-...
-ValidationError: [u'This field is required.']
->>> f.clean('')
-Traceback (most recent call last):
-...
-ValidationError: [u'This field is required.']
-
->>> f = USZipCodeField(required=False)
->>> f.clean('60606')
-u'60606'
->>> f.clean(60606)
-u'60606'
->>> f.clean('04000')
-u'04000'
->>> f.clean('4000')
-Traceback (most recent call last):
-...
-ValidationError: [u'Enter a zip code in the format XXXXX or XXXXX-XXXX.']
->>> f.clean('60606-1234')
-u'60606-1234'
->>> f.clean('6060-1234')
-Traceback (most recent call last):
-...
-ValidationError: [u'Enter a zip code in the format XXXXX or XXXXX-XXXX.']
->>> f.clean('60606-')
-Traceback (most recent call last):
-...
-ValidationError: [u'Enter a zip code in the format XXXXX or XXXXX-XXXX.']
->>> f.clean(None)
-u''
->>> f.clean('')
-u''
-
-# USPhoneNumberField ##########################################################
-
-USPhoneNumberField validates that the data is a valid U.S. phone number,
-including the area code. It's normalized to XXX-XXX-XXXX format.
->>> from django.contrib.localflavor.usa.forms import USPhoneNumberField
->>> f = USPhoneNumberField()
->>> f.clean('312-555-1212')
-u'312-555-1212'
->>> f.clean('3125551212')
-u'312-555-1212'
->>> f.clean('312 555-1212')
-u'312-555-1212'
->>> f.clean('(312) 555-1212')
-u'312-555-1212'
->>> f.clean('312 555 1212')
-u'312-555-1212'
->>> f.clean('312.555.1212')
-u'312-555-1212'
->>> f.clean('312.555-1212')
-u'312-555-1212'
->>> f.clean(' (312) 555.1212 ')
-u'312-555-1212'
->>> f.clean('555-1212')
-Traceback (most recent call last):
-...
-ValidationError: [u'Phone numbers must be in XXX-XXX-XXXX format.']
->>> f.clean('312-55-1212')
-Traceback (most recent call last):
-...
-ValidationError: [u'Phone numbers must be in XXX-XXX-XXXX format.']
->>> f.clean(None)
-Traceback (most recent call last):
-...
-ValidationError: [u'This field is required.']
->>> f.clean('')
-Traceback (most recent call last):
-...
-ValidationError: [u'This field is required.']
-
->>> f = USPhoneNumberField(required=False)
->>> f.clean('312-555-1212')
-u'312-555-1212'
->>> f.clean('3125551212')
-u'312-555-1212'
->>> f.clean('312 555-1212')
-u'312-555-1212'
->>> f.clean('(312) 555-1212')
-u'312-555-1212'
->>> f.clean('312 555 1212')
-u'312-555-1212'
->>> f.clean('312.555.1212')
-u'312-555-1212'
->>> f.clean('312.555-1212')
-u'312-555-1212'
->>> f.clean(' (312) 555.1212 ')
-u'312-555-1212'
->>> f.clean('555-1212')
-Traceback (most recent call last):
-...
-ValidationError: [u'Phone numbers must be in XXX-XXX-XXXX format.']
->>> f.clean('312-55-1212')
-Traceback (most recent call last):
-...
-ValidationError: [u'Phone numbers must be in XXX-XXX-XXXX format.']
->>> f.clean(None)
-u''
->>> f.clean('')
-u''
-
-# USStateField ################################################################
-
-USStateField validates that the data is either an abbreviation or name of a
-U.S. state.
->>> from django.contrib.localflavor.usa.forms import USStateField
->>> f = USStateField()
->>> f.clean('il')
-u'IL'
->>> f.clean('IL')
-u'IL'
->>> f.clean('illinois')
-u'IL'
->>> f.clean('  illinois ')
-u'IL'
->>> f.clean(60606)
-Traceback (most recent call last):
-...
-ValidationError: [u'Enter a U.S. state or territory.']
->>> f.clean(None)
-Traceback (most recent call last):
-...
-ValidationError: [u'This field is required.']
->>> f.clean('')
-Traceback (most recent call last):
-...
-ValidationError: [u'This field is required.']
-
->>> f = USStateField(required=False)
->>> f.clean('il')
-u'IL'
->>> f.clean('IL')
-u'IL'
->>> f.clean('illinois')
-u'IL'
->>> f.clean('  illinois ')
-u'IL'
->>> f.clean(60606)
-Traceback (most recent call last):
-...
-ValidationError: [u'Enter a U.S. state or territory.']
->>> f.clean(None)
-u''
->>> f.clean('')
-u''
-
-# USStateSelect ###############################################################
-
-USStateSelect is a Select widget that uses a list of U.S. states/territories
-as its choices.
->>> from django.contrib.localflavor.usa.forms import USStateSelect
->>> w = USStateSelect()
->>> print w.render('state', 'IL')
-<select name="state">
-<option value="AL">Alabama</option>
-<option value="AK">Alaska</option>
-<option value="AS">American Samoa</option>
-<option value="AZ">Arizona</option>
-<option value="AR">Arkansas</option>
-<option value="CA">California</option>
-<option value="CO">Colorado</option>
-<option value="CT">Connecticut</option>
-<option value="DE">Deleware</option>
-<option value="DC">District of Columbia</option>
-<option value="FM">Federated States of Micronesia</option>
-<option value="FL">Florida</option>
-<option value="GA">Georgia</option>
-<option value="GU">Guam</option>
-<option value="HI">Hawaii</option>
-<option value="ID">Idaho</option>
-<option value="IL" selected="selected">Illinois</option>
-<option value="IN">Indiana</option>
-<option value="IA">Iowa</option>
-<option value="KS">Kansas</option>
-<option value="KY">Kentucky</option>
-<option value="LA">Louisiana</option>
-<option value="ME">Maine</option>
-<option value="MH">Marshall Islands</option>
-<option value="MD">Maryland</option>
-<option value="MA">Massachusetts</option>
-<option value="MI">Michigan</option>
-<option value="MN">Minnesota</option>
-<option value="MS">Mississippi</option>
-<option value="MO">Missouri</option>
-<option value="MT">Montana</option>
-<option value="NE">Nebraska</option>
-<option value="NV">Nevada</option>
-<option value="NH">New Hampshire</option>
-<option value="NJ">New Jersey</option>
-<option value="NM">New Mexico</option>
-<option value="NY">New York</option>
-<option value="NC">North Carolina</option>
-<option value="ND">North Dakota</option>
-<option value="MP">Northern Mariana Islands</option>
-<option value="OH">Ohio</option>
-<option value="OK">Oklahoma</option>
-<option value="OR">Oregon</option>
-<option value="PW">Palau</option>
-<option value="PA">Pennsylvania</option>
-<option value="PR">Puerto Rico</option>
-<option value="RI">Rhode Island</option>
-<option value="SC">South Carolina</option>
-<option value="SD">South Dakota</option>
-<option value="TN">Tennessee</option>
-<option value="TX">Texas</option>
-<option value="UT">Utah</option>
-<option value="VT">Vermont</option>
-<option value="VI">Virgin Islands</option>
-<option value="VA">Virginia</option>
-<option value="WA">Washington</option>
-<option value="WV">West Virginia</option>
-<option value="WI">Wisconsin</option>
-<option value="WY">Wyoming</option>
-</select>
-
-# UKPostcodeField #############################################################
-
-UKPostcodeField validates that the data is a valid UK postcode.
->>> from django.contrib.localflavor.uk.forms import UKPostcodeField
->>> f = UKPostcodeField()
->>> f.clean('BT32 4PX')
-u'BT32 4PX'
->>> f.clean('GIR 0AA')
-u'GIR 0AA'
->>> f.clean('BT324PX')
-Traceback (most recent call last):
-...
-ValidationError: [u'Enter a postcode. A space is required between the two postcode parts.']
->>> f.clean('1NV 4L1D')
-Traceback (most recent call last):
-...
-ValidationError: [u'Enter a postcode. A space is required between the two postcode parts.']
->>> f.clean(None)
-Traceback (most recent call last):
-...
-ValidationError: [u'This field is required.']
->>> f.clean('')
-Traceback (most recent call last):
-...
-ValidationError: [u'This field is required.']
-
->>> f = UKPostcodeField(required=False)
->>> f.clean('BT32 4PX')
-u'BT32 4PX'
->>> f.clean('GIR 0AA')
-u'GIR 0AA'
->>> f.clean('1NV 4L1D')
-Traceback (most recent call last):
-...
-ValidationError: [u'Enter a postcode. A space is required between the two postcode parts.']
->>> f.clean('BT324PX')
-Traceback (most recent call last):
-...
-ValidationError: [u'Enter a postcode. A space is required between the two postcode parts.']
->>> f.clean(None)
-u''
->>> f.clean('')
-u''
-
-#################################
-# Tests of underlying functions #
-#################################
-
-# smart_unicode tests
->>> from django.newforms.util import smart_unicode
->>> class Test:
-...     def __str__(self):
-...        return 'ŠĐĆŽćžšđ'
->>> class TestU:
-...     def __str__(self):
-...        return 'Foo'
-...     def __unicode__(self):
-...        return u'\u0160\u0110\u0106\u017d\u0107\u017e\u0161\u0111'
->>> smart_unicode(Test())
-u'\u0160\u0110\u0106\u017d\u0107\u017e\u0161\u0111'
->>> smart_unicode(TestU())
-u'\u0160\u0110\u0106\u017d\u0107\u017e\u0161\u0111'
->>> smart_unicode(1)
-u'1'
->>> smart_unicode('foo')
-u'foo'
-"""
-
-if __name__ == "__main__":
-    import doctest
-    doctest.testmod()
--- a/thirdparty/google_appengine/lib/django/tests/regressiontests/httpwrappers/tests.py	Mon Sep 07 20:26:39 2009 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,358 +0,0 @@
-"""
-###################
-# Empty QueryDict #
-###################
-
->>> q = QueryDict('')
-
->>> q['foo']
-Traceback (most recent call last):
-...
-MultiValueDictKeyError: "Key 'foo' not found in <MultiValueDict: {}>"
-
->>> q['something'] = 'bar'
-Traceback (most recent call last):
-...
-AttributeError: This QueryDict instance is immutable
-
->>> q.get('foo', 'default')
-'default'
-
->>> q.getlist('foo')
-[]
-
->>> q.setlist('foo', ['bar', 'baz'])
-Traceback (most recent call last):
-...
-AttributeError: This QueryDict instance is immutable
-
->>> q.appendlist('foo', ['bar'])
-Traceback (most recent call last):
-...
-AttributeError: This QueryDict instance is immutable
-
->>> q.has_key('foo')
-False
-
->>> q.items()
-[]
-
->>> q.lists()
-[]
-
->>> q.keys()
-[]
-
->>> q.values()
-[]
-
->>> len(q)
-0
-
->>> q.update({'foo': 'bar'})
-Traceback (most recent call last):
-...
-AttributeError: This QueryDict instance is immutable
-
->>> q.pop('foo')
-Traceback (most recent call last):
-...
-AttributeError: This QueryDict instance is immutable
-
->>> q.popitem()
-Traceback (most recent call last):
-...
-AttributeError: This QueryDict instance is immutable
-
->>> q.clear()
-Traceback (most recent call last):
-...
-AttributeError: This QueryDict instance is immutable
-
->>> q.setdefault('foo', 'bar')
-Traceback (most recent call last):
-...
-AttributeError: This QueryDict instance is immutable
-
->>> q.urlencode()
-''
-
-###################################
-# Mutable copy of empty QueryDict #
-###################################
-
->>> q = q.copy()
-
->>> q['foo']
-Traceback (most recent call last):
-...
-MultiValueDictKeyError: "Key 'foo' not found in <MultiValueDict: {}>"
-
->>> q['name'] = 'john'
-
->>> q['name']
-'john'
-
->>> q.get('foo', 'default')
-'default'
-
->>> q.get('name', 'default')
-'john'
-
->>> q.getlist('name')
-['john']
-
->>> q.getlist('foo')
-[]
-
->>> q.setlist('foo', ['bar', 'baz'])
-
->>> q.get('foo', 'default')
-'baz'
-
->>> q.getlist('foo')
-['bar', 'baz']
-
->>> q.appendlist('foo', 'another')
-
->>> q.getlist('foo')
-['bar', 'baz', 'another']
-
->>> q['foo']
-'another'
-
->>> q.has_key('foo')
-True
-
->>> q.items()
-[('foo', 'another'), ('name', 'john')]
-
->>> q.lists()
-[('foo', ['bar', 'baz', 'another']), ('name', ['john'])]
-
->>> q.keys()
-['foo', 'name']
-
->>> q.values()
-['another', 'john']
-
->>> len(q)
-2
-
->>> q.update({'foo': 'hello'})
-
-# Displays last value
->>> q['foo']
-'hello'
-
->>> q.get('foo', 'not available')
-'hello'
-
->>> q.getlist('foo')
-['bar', 'baz', 'another', 'hello']
-
->>> q.pop('foo')
-['bar', 'baz', 'another', 'hello']
-
->>> q.get('foo', 'not there')
-'not there'
-
->>> q.setdefault('foo', 'bar')
-'bar'
-
->>> q['foo']
-'bar'
-
->>> q.getlist('foo')
-['bar']
-
->>> q.urlencode()
-'foo=bar&name=john'
-
->>> q.clear()
-
->>> len(q)
-0
-
-#####################################
-# QueryDict with one key/value pair #
-#####################################
-
->>> q = QueryDict('foo=bar')
-
->>> q['foo']
-'bar'
-
->>> q['bar']
-Traceback (most recent call last):
-...
-MultiValueDictKeyError: "Key 'bar' not found in <MultiValueDict: {'foo': ['bar']}>"
-
->>> q['something'] = 'bar'
-Traceback (most recent call last):
-...
-AttributeError: This QueryDict instance is immutable
-
->>> q.get('foo', 'default')
-'bar'
-
->>> q.get('bar', 'default')
-'default'
-
->>> q.getlist('foo')
-['bar']
-
->>> q.getlist('bar')
-[]
-
->>> q.setlist('foo', ['bar', 'baz'])
-Traceback (most recent call last):
-...
-AttributeError: This QueryDict instance is immutable
-
->>> q.appendlist('foo', ['bar'])
-Traceback (most recent call last):
-...
-AttributeError: This QueryDict instance is immutable
-
->>> q.has_key('foo')
-True
-
->>> q.has_key('bar')
-False
-
->>> q.items()
-[('foo', 'bar')]
-
->>> q.lists()
-[('foo', ['bar'])]
-
->>> q.keys()
-['foo']
-
->>> q.values()
-['bar']
-
->>> len(q)
-1
-
->>> q.update({'foo': 'bar'})
-Traceback (most recent call last):
-...
-AttributeError: This QueryDict instance is immutable
-
->>> q.pop('foo')
-Traceback (most recent call last):
-...
-AttributeError: This QueryDict instance is immutable
-
->>> q.popitem()
-Traceback (most recent call last):
-...
-AttributeError: This QueryDict instance is immutable
-
->>> q.clear()
-Traceback (most recent call last):
-...
-AttributeError: This QueryDict instance is immutable
-
->>> q.setdefault('foo', 'bar')
-Traceback (most recent call last):
-...
-AttributeError: This QueryDict instance is immutable
-
->>> q.urlencode()
-'foo=bar'
-
-#####################################################
-# QueryDict with two key/value pairs with same keys #
-#####################################################
-
->>> q = QueryDict('vote=yes&vote=no')
-
->>> q['vote']
-'no'
-
->>> q['something'] = 'bar'
-Traceback (most recent call last):
-...
-AttributeError: This QueryDict instance is immutable
-
->>> q.get('vote', 'default')
-'no'
-
->>> q.get('foo', 'default')
-'default'
-
->>> q.getlist('vote')
-['yes', 'no']
-
->>> q.getlist('foo')
-[]
-
->>> q.setlist('foo', ['bar', 'baz'])
-Traceback (most recent call last):
-...
-AttributeError: This QueryDict instance is immutable
-
->>> q.appendlist('foo', ['bar'])
-Traceback (most recent call last):
-...
-AttributeError: This QueryDict instance is immutable
-
->>> q.has_key('vote')
-True
-
->>> q.has_key('foo')
-False
-
->>> q.items()
-[('vote', 'no')]
-
->>> q.lists()
-[('vote', ['yes', 'no'])]
-
->>> q.keys()
-['vote']
-
->>> q.values()
-['no']
-
->>> len(q)
-1
-
->>> q.update({'foo': 'bar'})
-Traceback (most recent call last):
-...
-AttributeError: This QueryDict instance is immutable
-
->>> q.pop('foo')
-Traceback (most recent call last):
-...
-AttributeError: This QueryDict instance is immutable
-
->>> q.popitem()
-Traceback (most recent call last):
-...
-AttributeError: This QueryDict instance is immutable
-
->>> q.clear()
-Traceback (most recent call last):
-...
-AttributeError: This QueryDict instance is immutable
-
->>> q.setdefault('foo', 'bar')
-Traceback (most recent call last):
-...
-AttributeError: This QueryDict instance is immutable
-
->>> q.urlencode()
-'vote=yes&vote=no'
-
-"""
-
-from django.http import QueryDict
-
-if __name__ == "__main__":
-    import doctest
-    doctest.testmod()
--- a/thirdparty/google_appengine/lib/django/tests/regressiontests/humanize/tests.py	Mon Sep 07 20:26:39 2009 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,52 +0,0 @@
-import unittest
-from django.template import Template, Context, add_to_builtins
-
-add_to_builtins('django.contrib.humanize.templatetags.humanize')
-
-class HumanizeTests(unittest.TestCase):
-
-    def humanize_tester(self, test_list, result_list, method):
-        # Using max below ensures we go through both lists
-        # However, if the lists are not equal length, this raises an exception
-        for index in xrange(len(max(test_list,result_list))):
-            test_content = test_list[index]
-            t = Template('{{ test_content|%s }}' % method)
-            rendered = t.render(Context(locals())).strip()
-            self.assertEqual(rendered, result_list[index],
-                             msg="""%s test failed, produced %s,
-should've produced %s""" % (method, rendered, result_list[index]))
-    
-    def test_ordinal(self):
-        test_list = ('1','2','3','4','11','12',
-                     '13','101','102','103','111',
-                     'something else')
-        result_list = ('1st', '2nd', '3rd', '4th', '11th',
-                       '12th', '13th', '101st', '102nd', '103rd',
-                       '111th', 'something else')
-
-        self.humanize_tester(test_list, result_list, 'ordinal')
-
-    def test_intcomma(self):
-        test_list = ('100','1000','10123','10311','1000000')
-        result_list = ('100', '1,000', '10,123', '10,311', '1,000,000')
-
-        self.humanize_tester(test_list, result_list, 'intcomma')
-
-    def test_intword(self):
-        test_list = ('100', '1000000', '1200000', '1290000',
-                     '1000000000','2000000000','6000000000000')
-        result_list = ('100', '1.0 million', '1.2 million', '1.3 million',
-                       '1.0 billion', '2.0 billion', '6.0 trillion')
-
-        self.humanize_tester(test_list, result_list, 'intword')
-
-    def test_apnumber(self):
-        test_list = [str(x) for x in xrange(1,11)]
-        result_list = ('one', 'two', 'three', 'four', 'five', 'six',
-                       'seven', 'eight', 'nine', '10')
-
-        self.humanize_tester(test_list, result_list, 'apnumber')
-
-if __name__ == '__main__':
-    unittest.main()
-    
--- a/thirdparty/google_appengine/lib/django/tests/regressiontests/initial_sql_regress/models.py	Mon Sep 07 20:26:39 2009 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,13 +0,0 @@
-"""
-Regression tests for initial SQL insertion.
-"""
-
-from django.db import models
-
-class Simple(models.Model):
-    name = models.CharField(maxlength = 50)
-
-__test__ = {'API_TESTS':""}
-
-# NOTE: The format of the included SQL file for this test suite is important.
-# It must end with a trailing newline in order to test the fix for #2161.
--- a/thirdparty/google_appengine/lib/django/tests/regressiontests/initial_sql_regress/sql/simple.sql	Mon Sep 07 20:26:39 2009 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,8 +0,0 @@
-INSERT INTO initial_sql_regress_simple (name) VALUES ('John');
-INSERT INTO initial_sql_regress_simple (name) VALUES ('Paul');
-INSERT INTO initial_sql_regress_simple (name) VALUES ('Ringo');
-INSERT INTO initial_sql_regress_simple (name) VALUES ('George');
-INSERT INTO initial_sql_regress_simple (name) VALUES ('Miles O''Brien');
-INSERT INTO initial_sql_regress_simple (name) VALUES ('Semicolon;Man');
-INSERT INTO initial_sql_regress_simple (name) VALUES ('This line has a Windows line ending');
-
--- a/thirdparty/google_appengine/lib/django/tests/regressiontests/invalid_admin_options/models.py	Mon Sep 07 20:26:39 2009 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,337 +0,0 @@
-"""
-Admin options
-
-Test invalid and valid admin options to make sure that
-model validation is working properly. 
-"""
-
-from django.db import models
-model_errors = ""
-
-# TODO: Invalid admin options should not cause a metaclass error
-##This should fail gracefully but is causing a metaclass error
-#class BadAdminOption(models.Model):
-#    "Test nonexistent admin option"
-#    name = models.CharField(maxlength=30)
-#    
-#    class Admin:
-#        nonexistent = 'option'
-#
-#model_errors += """invalid_admin_options.badadminoption: "admin" attribute, if given, must be set to a models.AdminOptions() instance.
-#"""
-        
-class ListDisplayBadOne(models.Model):
-    "Test list_display, list_display must be a list or tuple"
-    first_name = models.CharField(maxlength=30)
-
-    class Admin:
-        list_display = 'first_name'
-
-model_errors += """invalid_admin_options.listdisplaybadone: "admin.list_display", if given, must be set to a list or tuple.
-"""
-
-class ListDisplayBadTwo(models.Model):
-    "Test list_display, list_display items must be attributes, methods or properties."
-    first_name = models.CharField(maxlength=30)
-
-    class Admin:
-        list_display = ['first_name','nonexistent']
-
-model_errors += """invalid_admin_options.listdisplaybadtwo: "admin.list_display" refers to 'nonexistent', which isn't an attribute, method or property.
-"""        
-class ListDisplayBadThree(models.Model):
-    "Test list_display, list_display items can not be a ManyToManyField."
-    first_name = models.CharField(maxlength=30)
-    nick_names = models.ManyToManyField('ListDisplayGood')
-
-    class Admin:
-        list_display = ['first_name','nick_names']
-        
-model_errors += """invalid_admin_options.listdisplaybadthree: "admin.list_display" doesn't support ManyToManyFields ('nick_names').
-""" 
-      
-class ListDisplayGood(models.Model):
-    "Test list_display, Admin list_display can be a attribute, method or property."
-    first_name = models.CharField(maxlength=30)
-    
-    def _last_name(self):
-        return self.first_name
-    last_name = property(_last_name)
-    
-    def full_name(self):
-        return "%s %s" % (self.first_name, self.last_name)
-
-    class Admin:
-        list_display = ['first_name','last_name','full_name']
-       
-class ListDisplayLinksBadOne(models.Model):
-    "Test list_display_links, item must be included in list_display."
-    first_name = models.CharField(maxlength=30)
-    last_name = models.CharField(maxlength=30)
-    
-    class Admin:
-        list_display = ['last_name']
-        list_display_links = ['first_name']
-
-model_errors += """invalid_admin_options.listdisplaylinksbadone: "admin.list_display_links" refers to 'first_name', which is not defined in "admin.list_display".
-"""
-
-class ListDisplayLinksBadTwo(models.Model):
-    "Test list_display_links, must be a list or tuple."
-    first_name = models.CharField(maxlength=30)
-    last_name = models.CharField(maxlength=30)
-    
-    class Admin:
-        list_display = ['first_name','last_name']
-        list_display_links = 'last_name'    
-
-model_errors += """invalid_admin_options.listdisplaylinksbadtwo: "admin.list_display_links", if given, must be set to a list or tuple.
-"""
-
-# TODO: Fix list_display_links validation or remove the check for list_display
-## This is failing but the validation which should fail is not.
-#class ListDisplayLinksBadThree(models.Model):
-#    "Test list_display_links, must define list_display to use list_display_links."
-#    first_name = models.CharField(maxlength=30)
-#    last_name = models.CharField(maxlength=30)
-#    
-#    class Admin:
-#        list_display_links = ('first_name',)
-#
-#model_errors += """invalid_admin_options.listdisplaylinksbadthree: "admin.list_display" must be defined for "admin.list_display_links" to be used.
-#"""
-        
-class ListDisplayLinksGood(models.Model):
-    "Test list_display_links, Admin list_display_list can be a attribute, method or property."
-    first_name = models.CharField(maxlength=30)
-    
-    def _last_name(self):
-        return self.first_name
-    last_name = property(_last_name)
-    
-    def full_name(self):
-        return "%s %s" % (self.first_name, self.last_name)
-
-    class Admin:
-        list_display = ['first_name','last_name','full_name']
-        list_display_links = ['first_name','last_name','full_name']
-
-class ListFilterBadOne(models.Model):
-    "Test list_filter, must be a list or tuple."
-    first_name = models.CharField(maxlength=30)
-    
-    class Admin:
-        list_filter = 'first_name'     
-
-model_errors += """invalid_admin_options.listfilterbadone: "admin.list_filter", if given, must be set to a list or tuple.
-"""
-
-class ListFilterBadTwo(models.Model):
-    "Test list_filter, must be a field not a property or method."
-    first_name = models.CharField(maxlength=30)
-    
-    def _last_name(self):
-        return self.first_name
-    last_name = property(_last_name)
-    
-    def full_name(self):
-        return "%s %s" % (self.first_name, self.last_name)
-
-    class Admin:
-        list_filter = ['first_name','last_name','full_name']
-
-model_errors += """invalid_admin_options.listfilterbadtwo: "admin.list_filter" refers to 'last_name', which isn't a field.
-invalid_admin_options.listfilterbadtwo: "admin.list_filter" refers to 'full_name', which isn't a field.
-"""
-
-class DateHierarchyBadOne(models.Model):
-    "Test date_hierarchy, must be a date or datetime field."
-    first_name = models.CharField(maxlength=30)
-    birth_day = models.DateField()
-    
-    class Admin:
-        date_hierarchy = 'first_name'
-        
-# TODO: Date Hierarchy needs to check if field is a date/datetime field.
-#model_errors += """invalid_admin_options.datehierarchybadone: "admin.date_hierarchy" refers to 'first_name', which isn't a date field or datetime field.
-#"""
-
-class DateHierarchyBadTwo(models.Model):
-    "Test date_hieracrhy, must be a field."
-    first_name = models.CharField(maxlength=30)
-    birth_day = models.DateField()
-    
-    class Admin:
-        date_hierarchy = 'nonexistent'          
-
-model_errors += """invalid_admin_options.datehierarchybadtwo: "admin.date_hierarchy" refers to 'nonexistent', which isn't a field.
-"""
-
-class DateHierarchyGood(models.Model):
-    "Test date_hieracrhy, must be a field."
-    first_name = models.CharField(maxlength=30)
-    birth_day = models.DateField()
-    
-    class Admin:
-        date_hierarchy = 'birth_day' 
-      
-class SearchFieldsBadOne(models.Model):
-    "Test search_fields, must be a list or tuple."
-    first_name = models.CharField(maxlength=30)
-    
-    class Admin:
-        search_fields = ('nonexistent')         
-
-# TODO: Add search_fields validation
-#model_errors += """invalid_admin_options.seacrhfieldsbadone: "admin.search_fields", if given, must be set to a list or tuple.
-#"""
-      
-class SearchFieldsBadTwo(models.Model):
-    "Test search_fields, must be a field."
-    first_name = models.CharField(maxlength=30)
-
-    def _last_name(self):
-        return self.first_name
-    last_name = property(_last_name)
-    
-    class Admin:
-        search_fields = ['first_name','last_name']         
-
-# TODO: Add search_fields validation
-#model_errors += """invalid_admin_options.seacrhfieldsbadone: "admin.search_fields" refers to 'last_name', which isn't a field.
-#"""
-
-class SearchFieldsGood(models.Model):
-    "Test search_fields, must be a list or tuple."
-    first_name = models.CharField(maxlength=30)
-    last_name = models.CharField(maxlength=30)
-    
-    class Admin:
-        search_fields = ['first_name','last_name']
-
-
-class JsBadOne(models.Model):
-    "Test js, must be a list or tuple"
-    name = models.CharField(maxlength=30)
-    
-    class Admin:
-        js = 'test.js'
-        
-# TODO: Add a js validator
-#model_errors += """invalid_admin_options.jsbadone: "admin.js", if given, must be set to a list or tuple.
-#"""
-
-class SaveAsBad(models.Model):
-    "Test save_as, should be True or False"
-    name = models.CharField(maxlength=30)
-    
-    class Admin:
-        save_as = 'not True or False'
-
-# TODO: Add a save_as validator.       
-#model_errors += """invalid_admin_options.saveasbad: "admin.save_as", if given, must be set to True or False.
-#"""
-
-class SaveOnTopBad(models.Model):
-    "Test save_on_top, should be True or False"
-    name = models.CharField(maxlength=30)
-    
-    class Admin:
-        save_on_top = 'not True or False'
-
-# TODO: Add a save_on_top validator.       
-#model_errors += """invalid_admin_options.saveontopbad: "admin.save_on_top", if given, must be set to True or False.
-#"""
-
-class ListSelectRelatedBad(models.Model):
-    "Test list_select_related, should be True or False"
-    name = models.CharField(maxlength=30)
-    
-    class Admin:
-        list_select_related = 'not True or False'
-
-# TODO: Add a list_select_related validator.       
-#model_errors += """invalid_admin_options.listselectrelatebad: "admin.list_select_related", if given, must be set to True or False.
-#"""
-
-class ListPerPageBad(models.Model):
-    "Test list_per_page, should be a positive integer value."
-    name = models.CharField(maxlength=30)
-    
-    class Admin:
-        list_per_page = 89.3
-
-# TODO: Add a list_per_page validator.       
-#model_errors += """invalid_admin_options.listperpagebad: "admin.list_per_page", if given, must be a positive integer.
-#"""
-
-class FieldsBadOne(models.Model):
-    "Test fields, should be a tuple"
-    first_name = models.CharField(maxlength=30)
-    last_name = models.CharField(maxlength=30)
-    
-    class Admin:
-        fields = 'not a tuple'
-
-# TODO: Add a fields validator.       
-#model_errors += """invalid_admin_options.fieldsbadone: "admin.fields", if given, must be a tuple.
-#"""
-
-class FieldsBadTwo(models.Model):
-    """Test fields, 'fields' dict option is required."""
-    first_name = models.CharField(maxlength=30)
-    last_name = models.CharField(maxlength=30)
-    
-    class Admin:
-        fields = ('Name', {'description': 'this fieldset needs fields'})
-        
-# TODO: Add a fields validator.       
-#model_errors += """invalid_admin_options.fieldsbadtwo: "admin.fields" each fieldset must include a 'fields' dict.
-#"""
-
-class FieldsBadThree(models.Model):
-    """Test fields, 'classes' and 'description' are the only allowable extra dict options."""
-    first_name = models.CharField(maxlength=30)
-    last_name = models.CharField(maxlength=30)
-    
-    class Admin:
-        fields = ('Name', {'fields': ('first_name','last_name'),'badoption': 'verybadoption'})
-
-# TODO: Add a fields validator.       
-#model_errors += """invalid_admin_options.fieldsbadthree: "admin.fields" fieldset options must be either 'classes' or 'description'.
-#"""
-
-class FieldsGood(models.Model):
-    "Test fields, working example"
-    first_name = models.CharField(maxlength=30)
-    last_name = models.CharField(maxlength=30)
-    birth_day = models.DateField()
-    
-    class Admin:
-        fields = (
-                  ('Name', {'fields': ('first_name','last_name'),'classes': 'collapse'}),
-                  (None, {'fields': ('birth_day',),'description': 'enter your b-day'})
-                  )
-                  
-class OrderingBad(models.Model):
-    "Test ordering, must be a field."
-    first_name = models.CharField(maxlength=30)
-    last_name = models.CharField(maxlength=30)
-    
-    class Admin:
-        ordering = 'nonexistent'
-
-# TODO: Add a ordering validator.       
-#model_errors += """invalid_admin_options.orderingbad: "admin.ordering" refers to 'nonexistent', which isn't a field.
-#"""
-
-## TODO: Add a manager validator, this should fail gracefully.
-#class ManagerBad(models.Model):
-#    "Test manager, must be a manager object."
-#    first_name = models.CharField(maxlength=30)
-#    
-#    class Admin:
-#        manager = 'nonexistent'
-#       
-#model_errors += """invalid_admin_options.managerbad: "admin.manager" refers to 'nonexistent', which isn't a Manager().
-#"""
\ No newline at end of file
--- a/thirdparty/google_appengine/lib/django/tests/regressiontests/many_to_one_regress/models.py	Mon Sep 07 20:26:39 2009 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,34 +0,0 @@
-from django.db import models
-
-# If ticket #1578 ever slips back in, these models will not be able to be
-# created (the field names being lower-cased versions of their opposite
-# classes is important here).
-
-class First(models.Model):
-    second = models.IntegerField()
-
-class Second(models.Model):
-    first = models.ForeignKey(First, related_name = 'the_first')
-
-# Protect against repetition of #1839, #2415 and #2536.
-class Third(models.Model):
-    name = models.CharField(maxlength=20)
-    third = models.ForeignKey('self', null=True, related_name='child_set')
-
-class Parent(models.Model):
-    name = models.CharField(maxlength=20)
-    bestchild = models.ForeignKey('Child', null=True, related_name='favored_by')
-
-class Child(models.Model):
-    name = models.CharField(maxlength=20)
-    parent = models.ForeignKey(Parent)
-
-
-__test__ = {'API_TESTS':"""
->>> Third.AddManipulator().save(dict(id='3', name='An example', another=None)) 
-<Third: Third object>
->>> parent = Parent(name = 'fred')
->>> parent.save()
->>> Child.AddManipulator().save(dict(name='bam-bam', parent=parent.id))
-<Child: Child object>
-"""}
--- a/thirdparty/google_appengine/lib/django/tests/regressiontests/markup/tests.py	Mon Sep 07 20:26:39 2009 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,69 +0,0 @@
-# Quick tests for the markup templatetags (django.contrib.markup)
-
-from django.template import Template, Context, add_to_builtins
-import re
-import unittest
-
-add_to_builtins('django.contrib.markup.templatetags.markup')
-
-class Templates(unittest.TestCase):
-    def test_textile(self):
-        try:
-            import textile
-        except ImportError:
-            textile = None
-
-        textile_content = """Paragraph 1
-
-Paragraph 2 with "quotes" and @code@"""
-
-        t = Template("{{ textile_content|textile }}")
-        rendered = t.render(Context(locals())).strip()
-        if textile:
-            self.assertEqual(rendered, """<p>Paragraph 1</p>
-
-<p>Paragraph 2 with &#8220;quotes&#8221; and <code>code</code></p>""")
-        else:
-            self.assertEqual(rendered, textile_content)
-
-    def test_markdown(self):
-        try:
-            import markdown
-        except ImportError:
-            markdown = None
-
-        markdown_content = """Paragraph 1
-
-## An h2"""
-
-        t = Template("{{ markdown_content|markdown }}")
-        rendered = t.render(Context(locals())).strip()
-        if markdown:
-            pattern = re.compile("""<p>Paragraph 1\s*</p>\s*<h2>\s*An h2</h2>""")
-            self.assert_(pattern.match(rendered))
-        else:
-            self.assertEqual(rendered, markdown_content)
-
-    def test_docutils(self):
-        try:
-            import docutils
-        except ImportError:
-            docutils = None
-
-        rest_content = """Paragraph 1
-
-Paragraph 2 with a link_
-
-.. _link: http://www.example.com/"""
-
-        t = Template("{{ rest_content|restructuredtext }}")
-        rendered = t.render(Context(locals())).strip()
-        if docutils:
-            self.assertEqual(rendered, """<p>Paragraph 1</p>
-<p>Paragraph 2 with a <a class="reference" href="http://www.example.com/">link</a></p>""")
-        else:
-            self.assertEqual(rendered, rest_content)
-
-
-if __name__ == '__main__':
-    unittest.main()
--- a/thirdparty/google_appengine/lib/django/tests/regressiontests/null_queries/models.py	Mon Sep 07 20:26:39 2009 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,54 +0,0 @@
-from django.db import models
-
-class Poll(models.Model):
-    question = models.CharField(maxlength=200)
-
-    def __str__(self):
-        return "Q: %s " % self.question
-
-class Choice(models.Model):
-    poll = models.ForeignKey(Poll)
-    choice = models.CharField(maxlength=200)
-
-    def __str__(self):
-        return "Choice: %s in poll %s" % (self.choice, self.poll)
-
-__test__ = {'API_TESTS':"""
-# Regression test for the use of None as a query value. None is interpreted as 
-# an SQL NULL, but only in __exact queries.
-# Set up some initial polls and choices
->>> p1 = Poll(question='Why?')
->>> p1.save()
->>> c1 = Choice(poll=p1, choice='Because.')
->>> c1.save()
->>> c2 = Choice(poll=p1, choice='Why Not?')
->>> c2.save()
-
-# Exact query with value None returns nothing (=NULL in sql)
->>> Choice.objects.filter(id__exact=None)
-[]
-
-# Valid query, but fails because foo isn't a keyword
->>> Choice.objects.filter(foo__exact=None) 
-Traceback (most recent call last):
-...
-TypeError: Cannot resolve keyword 'foo' into field
-
-# Can't use None on anything other than __exact
->>> Choice.objects.filter(id__gt=None)
-Traceback (most recent call last):
-...
-ValueError: Cannot use None as a query value
-
-# Can't use None on anything other than __exact
->>> Choice.objects.filter(foo__gt=None)
-Traceback (most recent call last):
-...
-ValueError: Cannot use None as a query value
-
-# Related managers use __exact=None implicitly if the object hasn't been saved.
->>> p2 = Poll(question="How?")
->>> p2.choice_set.all()
-[]
-
-"""}
--- a/thirdparty/google_appengine/lib/django/tests/regressiontests/one_to_one_regress/models.py	Mon Sep 07 20:26:39 2009 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,37 +0,0 @@
-from django.db import models
-
-class Place(models.Model):
-    name = models.CharField(maxlength=50)
-    address = models.CharField(maxlength=80)
-
-    def __str__(self):
-        return "%s the place" % self.name
-
-class Restaurant(models.Model):
-    place = models.OneToOneField(Place)
-    serves_hot_dogs = models.BooleanField()
-    serves_pizza = models.BooleanField()
-
-    def __str__(self):
-        return "%s the restaurant" % self.place.name
-
-class Favorites(models.Model):
-    name = models.CharField(maxlength = 50)
-    restaurants = models.ManyToManyField(Restaurant)
-
-    def __str__(self):
-        return "Favorites for %s" % self.name
-
-__test__ = {'API_TESTS':"""
-# Regression test for #1064 and #1506: Check that we create models via the m2m
-# relation if the remote model has a OneToOneField.
->>> p1 = Place(name='Demon Dogs', address='944 W. Fullerton')
->>> p1.save()
->>> r = Restaurant(place=p1, serves_hot_dogs=True, serves_pizza=False)
->>> r.save()
->>> f = Favorites(name = 'Fred')
->>> f.save()
->>> f.restaurants = [r]
->>> f.restaurants.all()
-[<Restaurant: Demon Dogs the restaurant>]
-"""}
--- a/thirdparty/google_appengine/lib/django/tests/regressiontests/serializers_regress/models.py	Mon Sep 07 20:26:39 2009 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,187 +0,0 @@
-"""
-A test spanning all the capabilities of all the serializers.
-
-This class sets up a model for each model field type 
-(except for image types, because of the PIL dependency).
-"""
-
-from django.db import models
-from django.contrib.contenttypes.models import ContentType
-
-# The following classes are for testing basic data 
-# marshalling, including NULL values.
-
-class BooleanData(models.Model):
-    data = models.BooleanField(null=True)
-    
-class CharData(models.Model):
-    data = models.CharField(maxlength=30, null=True)
-
-class DateData(models.Model):
-    data = models.DateField(null=True)
-
-class DateTimeData(models.Model):
-    data = models.DateTimeField(null=True)
-
-class EmailData(models.Model):
-    data = models.EmailField(null=True)
-
-class FileData(models.Model):
-    data = models.FileField(null=True, upload_to='/foo/bar')
-
-class FilePathData(models.Model):
-    data = models.FilePathField(null=True)
-
-class FloatData(models.Model):
-    data = models.FloatField(null=True, decimal_places=3, max_digits=5)
-
-class IntegerData(models.Model):
-    data = models.IntegerField(null=True)
-
-# class ImageData(models.Model):
-#    data = models.ImageField(null=True)
-
-class IPAddressData(models.Model):
-    data = models.IPAddressField(null=True)
-
-class NullBooleanData(models.Model):
-    data = models.NullBooleanField(null=True)
-
-class PhoneData(models.Model):
-    data = models.PhoneNumberField(null=True)
-
-class PositiveIntegerData(models.Model):
-    data = models.PositiveIntegerField(null=True)
-
-class PositiveSmallIntegerData(models.Model):
-    data = models.PositiveSmallIntegerField(null=True)
-
-class SlugData(models.Model):
-    data = models.SlugField(null=True)
-
-class SmallData(models.Model):
-    data = models.SmallIntegerField(null=True)
-
-class TextData(models.Model):
-    data = models.TextField(null=True)
-
-class TimeData(models.Model):
-    data = models.TimeField(null=True)
-
-class USStateData(models.Model):
-    data = models.USStateField(null=True)
-
-class XMLData(models.Model):
-    data = models.XMLField(null=True)
-    
-class Tag(models.Model):
-    """A tag on an item."""
-    data = models.SlugField()
-    content_type = models.ForeignKey(ContentType)
-    object_id = models.PositiveIntegerField()
-
-    content_object = models.GenericForeignKey()
-
-    class Meta:
-        ordering = ["data"]
-
-class GenericData(models.Model):
-    data = models.CharField(maxlength=30)
-
-    tags = models.GenericRelation(Tag)
-    
-# The following test classes are all for validation
-# of related objects; in particular, forward, backward,
-# and self references.
-    
-class Anchor(models.Model):
-    """This is a model that can be used as 
-    something for other models to point at"""
-    
-    data = models.CharField(maxlength=30)
-    
-class FKData(models.Model):
-    data = models.ForeignKey(Anchor, null=True)
-    
-class M2MData(models.Model):
-    data = models.ManyToManyField(Anchor, null=True)
-    
-class O2OData(models.Model):
-    data = models.OneToOneField(Anchor, null=True)
-
-class FKSelfData(models.Model):
-    data = models.ForeignKey('self', null=True)
-    
-class M2MSelfData(models.Model):
-    data = models.ManyToManyField('self', null=True, symmetrical=False)
-
-# The following test classes are for validating the
-# deserialization of objects that use a user-defined
-# field as the primary key.
-# Some of these data types have been commented out
-# because they can't be used as a primary key on one
-# or all database backends.
-
-class BooleanPKData(models.Model):
-    data = models.BooleanField(primary_key=True)
-    
-class CharPKData(models.Model):
-    data = models.CharField(maxlength=30, primary_key=True)
-
-# class DatePKData(models.Model):
-#    data = models.DateField(primary_key=True)
-
-# class DateTimePKData(models.Model):
-#    data = models.DateTimeField(primary_key=True)
-
-class EmailPKData(models.Model):
-    data = models.EmailField(primary_key=True)
-
-class FilePKData(models.Model):
-    data = models.FileField(primary_key=True, upload_to='/foo/bar')
-
-class FilePathPKData(models.Model):
-    data = models.FilePathField(primary_key=True)
-
-class FloatPKData(models.Model):
-    data = models.FloatField(primary_key=True, decimal_places=3, max_digits=5)
-
-class IntegerPKData(models.Model):
-    data = models.IntegerField(primary_key=True)
-
-# class ImagePKData(models.Model):
-#    data = models.ImageField(primary_key=True)
-
-class IPAddressPKData(models.Model):
-    data = models.IPAddressField(primary_key=True)
-
-class NullBooleanPKData(models.Model):
-    data = models.NullBooleanField(primary_key=True)
-
-class PhonePKData(models.Model):
-    data = models.PhoneNumberField(primary_key=True)
-
-class PositiveIntegerPKData(models.Model):
-    data = models.PositiveIntegerField(primary_key=True)
-
-class PositiveSmallIntegerPKData(models.Model):
-    data = models.PositiveSmallIntegerField(primary_key=True)
-
-class SlugPKData(models.Model):
-    data = models.SlugField(primary_key=True)
-
-class SmallPKData(models.Model):
-    data = models.SmallIntegerField(primary_key=True)
-
-# class TextPKData(models.Model):
-#     data = models.TextField(primary_key=True)
-
-# class TimePKData(models.Model):
-#    data = models.TimeField(primary_key=True)
-
-class USStatePKData(models.Model):
-    data = models.USStateField(primary_key=True)
-
-# class XMLPKData(models.Model):
-#     data = models.XMLField(primary_key=True)
-
--- a/thirdparty/google_appengine/lib/django/tests/regressiontests/serializers_regress/tests.py	Mon Sep 07 20:26:39 2009 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,263 +0,0 @@
-"""
-A test spanning all the capabilities of all the serializers.
-
-This class defines sample data and a dynamically generated
-test case that is capable of testing the capabilities of 
-the serializers. This includes all valid data values, plus
-forward, backwards and self references.
-"""
-
-
-import unittest, datetime
-
-from django.utils.functional import curry
-from django.core import serializers
-from django.db import transaction
-from django.core import management
-
-from models import *
-
-# A set of functions that can be used to recreate
-# test data objects of various kinds
-def data_create(pk, klass, data):
-    instance = klass(id=pk)
-    instance.data = data
-    instance.save()    
-    return instance
-
-def generic_create(pk, klass, data):
-    instance = klass(id=pk)
-    instance.data = data[0]
-    instance.save()
-    for tag in data[1:]:
-        instance.tags.create(data=tag)
-    return instance
-    
-def fk_create(pk, klass, data):
-    instance = klass(id=pk)
-    setattr(instance, 'data_id', data)
-    instance.save()
-    return instance
-    
-def m2m_create(pk, klass, data):
-    instance = klass(id=pk)
-    instance.save()
-    instance.data = data
-    return instance
-
-def o2o_create(pk, klass, data):
-    instance = klass()
-    instance.data_id = data
-    instance.save()
-    return instance
-
-def pk_create(pk, klass, data):
-    instance = klass()
-    instance.data = data
-    instance.save()
-    return instance
-
-# A set of functions that can be used to compare
-# test data objects of various kinds
-def data_compare(testcase, pk, klass, data):
-    instance = klass.objects.get(id=pk)
-    testcase.assertEqual(data, instance.data, 
-                         "Objects with PK=%d not equal; expected '%s' (%s), got '%s' (%s)" % (pk,data, type(data), instance.data, type(instance.data)))
-
-def generic_compare(testcase, pk, klass, data):
-    instance = klass.objects.get(id=pk)
-    testcase.assertEqual(data[0], instance.data)
-    testcase.assertEqual(data[1:], [t.data for t in instance.tags.all()])
-    
-def fk_compare(testcase, pk, klass, data):
-    instance = klass.objects.get(id=pk)
-    testcase.assertEqual(data, instance.data_id)
-
-def m2m_compare(testcase, pk, klass, data):
-    instance = klass.objects.get(id=pk)
-    testcase.assertEqual(data, [obj.id for obj in instance.data.all()])
-
-def o2o_compare(testcase, pk, klass, data):
-    instance = klass.objects.get(data=data)
-    testcase.assertEqual(data, instance.data_id)
-
-def pk_compare(testcase, pk, klass, data):
-    instance = klass.objects.get(data=data)
-    testcase.assertEqual(data, instance.data)
-        
-# Define some data types. Each data type is
-# actually a pair of functions; one to create
-# and one to compare objects of that type
-data_obj = (data_create, data_compare)
-generic_obj = (generic_create, generic_compare)
-fk_obj = (fk_create, fk_compare)
-m2m_obj = (m2m_create, m2m_compare)
-o2o_obj = (o2o_create, o2o_compare)
-pk_obj = (pk_create, pk_compare)
-
-test_data = [
-    # Format: (data type, PK value, Model Class, data)  
-    (data_obj, 1, BooleanData, True),
-    (data_obj, 2, BooleanData, False),
-    (data_obj, 10, CharData, "Test Char Data"),
-    (data_obj, 11, CharData, ""),
-    (data_obj, 12, CharData, "None"),
-    (data_obj, 13, CharData, "null"),
-    (data_obj, 14, CharData, "NULL"),
-    (data_obj, 15, CharData, None),
-    (data_obj, 20, DateData, datetime.date(2006,6,16)),
-    (data_obj, 21, DateData, None),
-    (data_obj, 30, DateTimeData, datetime.datetime(2006,6,16,10,42,37)),
-    (data_obj, 31, DateTimeData, None),
-    (data_obj, 40, EmailData, "hovercraft@example.com"),
-    (data_obj, 41, EmailData, None),
-    (data_obj, 50, FileData, 'file:///foo/bar/whiz.txt'),
-    (data_obj, 51, FileData, None),
-    (data_obj, 60, FilePathData, "/foo/bar/whiz.txt"),
-    (data_obj, 61, FilePathData, None),
-    (data_obj, 70, FloatData, 12.345),
-    (data_obj, 71, FloatData, -12.345),
-    (data_obj, 72, FloatData, 0.0),
-    (data_obj, 73, FloatData, None),
-    (data_obj, 80, IntegerData, 123456789),
-    (data_obj, 81, IntegerData, -123456789),
-    (data_obj, 82, IntegerData, 0),
-    (data_obj, 83, IntegerData, None),
-    #(XX, ImageData
-    (data_obj, 90, IPAddressData, "127.0.0.1"),
-    (data_obj, 91, IPAddressData, None),
-    (data_obj, 100, NullBooleanData, True),
-    (data_obj, 101, NullBooleanData, False),
-    (data_obj, 102, NullBooleanData, None),
-    (data_obj, 110, PhoneData, "212-634-5789"),
-    (data_obj, 111, PhoneData, None),
-    (data_obj, 120, PositiveIntegerData, 123456789),
-    (data_obj, 121, PositiveIntegerData, None),
-    (data_obj, 130, PositiveSmallIntegerData, 12),
-    (data_obj, 131, PositiveSmallIntegerData, None),
-    (data_obj, 140, SlugData, "this-is-a-slug"),
-    (data_obj, 141, SlugData, None),
-    (data_obj, 150, SmallData, 12), 
-    (data_obj, 151, SmallData, -12), 
-    (data_obj, 152, SmallData, 0), 
-    (data_obj, 153, SmallData, None), 
-    (data_obj, 160, TextData, """This is a long piece of text.
-It contains line breaks.
-Several of them.
-The end."""),
-    (data_obj, 161, TextData, ""),
-    (data_obj, 162, TextData, None),
-    (data_obj, 170, TimeData, datetime.time(10,42,37)),
-    (data_obj, 171, TimeData, None),
-    (data_obj, 180, USStateData, "MA"),
-    (data_obj, 181, USStateData, None),
-    (data_obj, 190, XMLData, "<foo></foo>"),
-    (data_obj, 191, XMLData, None),
-
-    (generic_obj, 200, GenericData, ['Generic Object 1', 'tag1', 'tag2']),
-    (generic_obj, 201, GenericData, ['Generic Object 2', 'tag2', 'tag3']),
-
-    (data_obj, 300, Anchor, "Anchor 1"),
-    (data_obj, 301, Anchor, "Anchor 2"),
-
-    (fk_obj, 400, FKData, 300), # Post reference
-    (fk_obj, 401, FKData, 500), # Pre reference
-    (fk_obj, 402, FKData, None), # Empty reference
-
-    (m2m_obj, 410, M2MData, []), # Empty set
-    (m2m_obj, 411, M2MData, [300,301]), # Post reference
-    (m2m_obj, 412, M2MData, [500,501]), # Pre reference
-    (m2m_obj, 413, M2MData, [300,301,500,501]), # Pre and Post reference
-
-    (o2o_obj, None, O2OData, 300), # Post reference
-    (o2o_obj, None, O2OData, 500), # Pre reference
-
-    (fk_obj, 430, FKSelfData, 431), # Pre reference
-    (fk_obj, 431, FKSelfData, 430), # Post reference
-    (fk_obj, 432, FKSelfData, None), # Empty reference
-
-    (m2m_obj, 440, M2MSelfData, []),
-    (m2m_obj, 441, M2MSelfData, []),
-    (m2m_obj, 442, M2MSelfData, [440, 441]),
-    (m2m_obj, 443, M2MSelfData, [445, 446]),
-    (m2m_obj, 444, M2MSelfData, [440, 441, 445, 446]),
-    (m2m_obj, 445, M2MSelfData, []),
-    (m2m_obj, 446, M2MSelfData, []),
-
-    (data_obj, 500, Anchor, "Anchor 3"),
-    (data_obj, 501, Anchor, "Anchor 4"),
-
-    (pk_obj, 601, BooleanPKData, True),
-    (pk_obj, 602, BooleanPKData, False),
-    (pk_obj, 610, CharPKData, "Test Char PKData"),
-#     (pk_obj, 620, DatePKData, datetime.date(2006,6,16)),
-#     (pk_obj, 630, DateTimePKData, datetime.datetime(2006,6,16,10,42,37)),
-    (pk_obj, 640, EmailPKData, "hovercraft@example.com"),
-    (pk_obj, 650, FilePKData, 'file:///foo/bar/whiz.txt'),
-    (pk_obj, 660, FilePathPKData, "/foo/bar/whiz.txt"),
-    (pk_obj, 670, FloatPKData, 12.345),
-    (pk_obj, 671, FloatPKData, -12.345),
-    (pk_obj, 672, FloatPKData, 0.0),
-    (pk_obj, 680, IntegerPKData, 123456789),
-    (pk_obj, 681, IntegerPKData, -123456789),
-    (pk_obj, 682, IntegerPKData, 0),
-#     (XX, ImagePKData
-    (pk_obj, 690, IPAddressPKData, "127.0.0.1"),
-    (pk_obj, 700, NullBooleanPKData, True),
-    (pk_obj, 701, NullBooleanPKData, False),
-    (pk_obj, 710, PhonePKData, "212-634-5789"),
-    (pk_obj, 720, PositiveIntegerPKData, 123456789),
-    (pk_obj, 730, PositiveSmallIntegerPKData, 12),
-    (pk_obj, 740, SlugPKData, "this-is-a-slug"),
-    (pk_obj, 750, SmallPKData, 12), 
-    (pk_obj, 751, SmallPKData, -12), 
-    (pk_obj, 752, SmallPKData, 0), 
-#     (pk_obj, 760, TextPKData, """This is a long piece of text.
-# It contains line breaks.
-# Several of them.
-# The end."""),
-#    (pk_obj, 770, TimePKData, datetime.time(10,42,37)),
-    (pk_obj, 780, USStatePKData, "MA"),
-#     (pk_obj, 790, XMLPKData, "<foo></foo>"),
-]
-    
-# Dynamically create serializer tests to ensure that all
-# registered serializers are automatically tested.
-class SerializerTests(unittest.TestCase):
-    pass
-
-def serializerTest(format, self):
-    # Clear the database first
-    management.flush(verbosity=0, interactive=False)    
-
-    # Create all the objects defined in the test data
-    objects = []
-    transaction.enter_transaction_management()
-    transaction.managed(True)
-    for (func, pk, klass, datum) in test_data:
-        objects.append(func[0](pk, klass, datum))
-    transaction.commit()
-    transaction.leave_transaction_management()
-
-    # Add the generic tagged objects to the object list 
-    objects.extend(Tag.objects.all())
-    
-    # Serialize the test database
-    serialized_data = serializers.serialize(format, objects, indent=2)
-
-    # Flush the database and recreate from the serialized data
-    management.flush(verbosity=0, interactive=False)    
-    transaction.enter_transaction_management()
-    transaction.managed(True)
-    for obj in serializers.deserialize(format, serialized_data):
-        obj.save()
-    transaction.commit()
-    transaction.leave_transaction_management()
-
-    # Assert that the deserialized data is the same 
-    # as the original source
-    for (func, pk, klass, datum) in test_data:
-        func[1](self, pk, klass, datum)
-    
-for format in serializers.get_serializer_formats():
-    setattr(SerializerTests, 'test_'+format+'_serializer', curry(serializerTest, format))
--- a/thirdparty/google_appengine/lib/django/tests/regressiontests/string_lookup/models.py	Mon Sep 07 20:26:39 2009 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,69 +0,0 @@
-from django.db import models
-
-class Foo(models.Model):
-    name = models.CharField(maxlength=50)
-
-    def __str__(self):
-        return "Foo %s" % self.name
-
-class Bar(models.Model):
-    name = models.CharField(maxlength=50)
-    normal = models.ForeignKey(Foo, related_name='normal_foo')
-    fwd = models.ForeignKey("Whiz")
-    back = models.ForeignKey("Foo")
-
-    def __str__(self):
-        return "Bar %s" % self.place.name
-
-class Whiz(models.Model):
-    name = models.CharField(maxlength = 50)
-
-    def __str__(self):
-        return "Whiz %s" % self.name
-
-class Child(models.Model):
-    parent = models.OneToOneField('Base')
-    name = models.CharField(maxlength = 50)
-
-    def __str__(self):
-        return "Child %s" % self.name
-    
-class Base(models.Model):
-    name = models.CharField(maxlength = 50)
-
-    def __str__(self):
-        return "Base %s" % self.name
-
-__test__ = {'API_TESTS':"""
-# Regression test for #1661 and #1662: Check that string form referencing of models works, 
-# both as pre and post reference, on all RelatedField types.
-
->>> f1 = Foo(name="Foo1")
->>> f1.save()
->>> f2 = Foo(name="Foo1")
->>> f2.save()
-
->>> w1 = Whiz(name="Whiz1")
->>> w1.save()
-
->>> b1 = Bar(name="Bar1", normal=f1, fwd=w1, back=f2)
->>> b1.save()
-
->>> b1.normal
-<Foo: Foo Foo1>
-
->>> b1.fwd
-<Whiz: Whiz Whiz1>
-
->>> b1.back
-<Foo: Foo Foo1>
-
->>> base1 = Base(name="Base1")
->>> base1.save()
-
->>> child1 = Child(name="Child1", parent=base1)
->>> child1.save()
-
->>> child1.parent
-<Base: Base Base1>
-"""}
--- a/thirdparty/google_appengine/lib/django/tests/regressiontests/templates/tests.py	Mon Sep 07 20:26:39 2009 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,757 +0,0 @@
-# -*- coding: utf-8 -*-
-from django.conf import settings
-
-if __name__ == '__main__':
-    # When running this file in isolation, we need to set up the configuration
-    # before importing 'template'.
-    settings.configure()
-
-from django import template
-from django.template import loader
-from django.utils.translation import activate, deactivate, install
-from django.utils.tzinfo import LocalTimezone
-from datetime import datetime, timedelta
-import unittest
-
-#################################
-# Custom template tag for tests #
-#################################
-
-register = template.Library()
-
-class EchoNode(template.Node):
-    def __init__(self, contents):
-        self.contents = contents
-
-    def render(self, context):
-        return " ".join(self.contents)
-
-def do_echo(parser, token):
-    return EchoNode(token.contents.split()[1:])
-
-register.tag("echo", do_echo)
-
-template.libraries['django.templatetags.testtags'] = register
-
-#####################################
-# Helper objects for template tests #
-#####################################
-
-class SomeException(Exception):
-    silent_variable_failure = True
-
-class SomeOtherException(Exception):
-    pass
-
-class SomeClass:
-    def __init__(self):
-        self.otherclass = OtherClass()
-
-    def method(self):
-        return "SomeClass.method"
-
-    def method2(self, o):
-        return o
-
-    def method3(self):
-        raise SomeException
-
-    def method4(self):
-        raise SomeOtherException
-
-class OtherClass:
-    def method(self):
-        return "OtherClass.method"
-
-class UnicodeInStrClass:
-    "Class whose __str__ returns a Unicode object."
-    def __str__(self):
-        return u'ŠĐĆŽćžšđ'
-
-class Templates(unittest.TestCase):
-    def test_templates(self):
-        # NOW and NOW_tz are used by timesince tag tests.
-        NOW = datetime.now()
-        NOW_tz = datetime.now(LocalTimezone(datetime.now()))
-
-        # SYNTAX --
-        # 'template_name': ('template contents', 'context dict', 'expected string output' or Exception class)
-        TEMPLATE_TESTS = {
-
-            ### BASIC SYNTAX ##########################################################
-
-            # Plain text should go through the template parser untouched
-            'basic-syntax01': ("something cool", {}, "something cool"),
-
-            # Variables should be replaced with their value in the current context
-            'basic-syntax02': ("{{ headline }}", {'headline':'Success'}, "Success"),
-
-            # More than one replacement variable is allowed in a template
-            'basic-syntax03': ("{{ first }} --- {{ second }}", {"first" : 1, "second" : 2}, "1 --- 2"),
-
-            # Fail silently when a variable is not found in the current context
-            'basic-syntax04': ("as{{ missing }}df", {}, ("asdf","asINVALIDdf")),
-
-            # A variable may not contain more than one word
-            'basic-syntax06': ("{{ multi word variable }}", {}, template.TemplateSyntaxError),
-
-            # Raise TemplateSyntaxError for empty variable tags
-            'basic-syntax07': ("{{ }}",        {}, template.TemplateSyntaxError),
-            'basic-syntax08': ("{{        }}", {}, template.TemplateSyntaxError),
-
-            # Attribute syntax allows a template to call an object's attribute
-            'basic-syntax09': ("{{ var.method }}", {"var": SomeClass()}, "SomeClass.method"),
-
-            # Multiple levels of attribute access are allowed
-            'basic-syntax10': ("{{ var.otherclass.method }}", {"var": SomeClass()}, "OtherClass.method"),
-
-            # Fail silently when a variable's attribute isn't found
-            'basic-syntax11': ("{{ var.blech }}", {"var": SomeClass()}, ("","INVALID")),
-
-            # Raise TemplateSyntaxError when trying to access a variable beginning with an underscore
-            'basic-syntax12': ("{{ var.__dict__ }}", {"var": SomeClass()}, template.TemplateSyntaxError),
-
-            # Raise TemplateSyntaxError when trying to access a variable containing an illegal character
-            'basic-syntax13': ("{{ va>r }}", {}, template.TemplateSyntaxError),
-            'basic-syntax14': ("{{ (var.r) }}", {}, template.TemplateSyntaxError),
-            'basic-syntax15': ("{{ sp%am }}", {}, template.TemplateSyntaxError),
-            'basic-syntax16': ("{{ eggs! }}", {}, template.TemplateSyntaxError),
-            'basic-syntax17': ("{{ moo? }}", {}, template.TemplateSyntaxError),
-
-            # Attribute syntax allows a template to call a dictionary key's value
-            'basic-syntax18': ("{{ foo.bar }}", {"foo" : {"bar" : "baz"}}, "baz"),
-
-            # Fail silently when a variable's dictionary key isn't found
-            'basic-syntax19': ("{{ foo.spam }}", {"foo" : {"bar" : "baz"}}, ("","INVALID")),
-
-            # Fail silently when accessing a non-simple method
-            'basic-syntax20': ("{{ var.method2 }}", {"var": SomeClass()}, ("","INVALID")),
-
-            # List-index syntax allows a template to access a certain item of a subscriptable object.
-            'list-index01': ("{{ var.1 }}", {"var": ["first item", "second item"]}, "second item"),
-
-            # Fail silently when the list index is out of range.
-            'list-index02': ("{{ var.5 }}", {"var": ["first item", "second item"]}, ("", "INVALID")),
-
-            # Fail silently when the variable is not a subscriptable object.
-            'list-index03': ("{{ var.1 }}", {"var": None}, ("", "INVALID")),
-
-            # Fail silently when variable is a dict without the specified key.
-            'list-index04': ("{{ var.1 }}", {"var": {}}, ("", "INVALID")),
-
-            # Dictionary lookup wins out when dict's key is a string.
-            'list-index05': ("{{ var.1 }}", {"var": {'1': "hello"}}, "hello"),
-
-            # But list-index lookup wins out when dict's key is an int, which
-            # behind the scenes is really a dictionary lookup (for a dict)
-            # after converting the key to an int.
-            'list-index06': ("{{ var.1 }}", {"var": {1: "hello"}}, "hello"),
-
-            # Dictionary lookup wins out when there is a string and int version of the key.
-            'list-index07': ("{{ var.1 }}", {"var": {'1': "hello", 1: "world"}}, "hello"),
-            
-            # Basic filter usage
-            'basic-syntax21': ("{{ var|upper }}", {"var": "Django is the greatest!"}, "DJANGO IS THE GREATEST!"),
-
-            # Chained filters
-            'basic-syntax22': ("{{ var|upper|lower }}", {"var": "Django is the greatest!"}, "django is the greatest!"),
-
-            # Raise TemplateSyntaxError for space between a variable and filter pipe
-            'basic-syntax23': ("{{ var |upper }}", {}, template.TemplateSyntaxError),
-
-            # Raise TemplateSyntaxError for space after a filter pipe
-            'basic-syntax24': ("{{ var| upper }}", {}, template.TemplateSyntaxError),
-
-            # Raise TemplateSyntaxError for a nonexistent filter
-            'basic-syntax25': ("{{ var|does_not_exist }}", {}, template.TemplateSyntaxError),
-
-            # Raise TemplateSyntaxError when trying to access a filter containing an illegal character
-            'basic-syntax26': ("{{ var|fil(ter) }}", {}, template.TemplateSyntaxError),
-
-            # Raise TemplateSyntaxError for invalid block tags
-            'basic-syntax27': ("{% nothing_to_see_here %}", {}, template.TemplateSyntaxError),
-
-            # Raise TemplateSyntaxError for empty block tags
-            'basic-syntax28': ("{% %}", {}, template.TemplateSyntaxError),
-
-            # Chained filters, with an argument to the first one
-            'basic-syntax29': ('{{ var|removetags:"b i"|upper|lower }}', {"var": "<b><i>Yes</i></b>"}, "yes"),
-
-            # Escaped string as argument
-            'basic-syntax30': (r'{{ var|default_if_none:" endquote\" hah" }}', {"var": None}, ' endquote" hah'),
-
-            # Variable as argument
-            'basic-syntax31': (r'{{ var|default_if_none:var2 }}', {"var": None, "var2": "happy"}, 'happy'),
-
-            # Default argument testing
-            'basic-syntax32': (r'{{ var|yesno:"yup,nup,mup" }} {{ var|yesno }}', {"var": True}, 'yup yes'),
-
-            # Fail silently for methods that raise an exception with a "silent_variable_failure" attribute
-            'basic-syntax33': (r'1{{ var.method3 }}2', {"var": SomeClass()}, ("12", "1INVALID2")),
-
-            # In methods that raise an exception without a "silent_variable_attribute" set to True,
-            # the exception propagates
-            'basic-syntax34': (r'1{{ var.method4 }}2', {"var": SomeClass()}, SomeOtherException),
-
-            # Escaped backslash in argument
-            'basic-syntax35': (r'{{ var|default_if_none:"foo\bar" }}', {"var": None}, r'foo\bar'),
-
-            # Escaped backslash using known escape char
-            'basic-syntax35': (r'{{ var|default_if_none:"foo\now" }}', {"var": None}, r'foo\now'),
-
-            # Empty strings can be passed as arguments to filters
-            'basic-syntax36': (r'{{ var|join:"" }}', {'var': ['a', 'b', 'c']}, 'abc'),
-
-            # If a variable has a __str__() that returns a Unicode object, the value
-            # will be converted to a bytestring.
-            'basic-syntax37': (r'{{ var }}', {'var': UnicodeInStrClass()}, '\xc5\xa0\xc4\x90\xc4\x86\xc5\xbd\xc4\x87\xc5\xbe\xc5\xa1\xc4\x91'),
-
-            ### COMMENT SYNTAX ########################################################
-            'comment-syntax01': ("{# this is hidden #}hello", {}, "hello"),
-            'comment-syntax02': ("{# this is hidden #}hello{# foo #}", {}, "hello"),
-
-            # Comments can contain invalid stuff.
-            'comment-syntax03': ("foo{#  {% if %}  #}", {}, "foo"),
-            'comment-syntax04': ("foo{#  {% endblock %}  #}", {}, "foo"),
-            'comment-syntax05': ("foo{#  {% somerandomtag %}  #}", {}, "foo"),
-            'comment-syntax06': ("foo{# {% #}", {}, "foo"),
-            'comment-syntax07': ("foo{# %} #}", {}, "foo"),
-            'comment-syntax08': ("foo{# %} #}bar", {}, "foobar"),
-            'comment-syntax09': ("foo{# {{ #}", {}, "foo"),
-            'comment-syntax10': ("foo{# }} #}", {}, "foo"),
-            'comment-syntax11': ("foo{# { #}", {}, "foo"),
-            'comment-syntax12': ("foo{# } #}", {}, "foo"),
-
-            ### COMMENT TAG ###########################################################
-            'comment-tag01': ("{% comment %}this is hidden{% endcomment %}hello", {}, "hello"),
-            'comment-tag02': ("{% comment %}this is hidden{% endcomment %}hello{% comment %}foo{% endcomment %}", {}, "hello"),
-
-            # Comment tag can contain invalid stuff.
-            'comment-tag03': ("foo{% comment %} {% if %} {% endcomment %}", {}, "foo"),
-            'comment-tag04': ("foo{% comment %} {% endblock %} {% endcomment %}", {}, "foo"),
-            'comment-tag05': ("foo{% comment %} {% somerandomtag %} {% endcomment %}", {}, "foo"),
-
-            ### CYCLE TAG #############################################################
-            'cycle01': ('{% cycle a %}', {}, template.TemplateSyntaxError),
-            'cycle02': ('{% cycle a,b,c as abc %}{% cycle abc %}', {}, 'ab'),
-            'cycle03': ('{% cycle a,b,c as abc %}{% cycle abc %}{% cycle abc %}', {}, 'abc'),
-            'cycle04': ('{% cycle a,b,c as abc %}{% cycle abc %}{% cycle abc %}{% cycle abc %}', {}, 'abca'),
-            'cycle05': ('{% cycle %}', {}, template.TemplateSyntaxError),
-            'cycle06': ('{% cycle a %}', {}, template.TemplateSyntaxError),
-            'cycle07': ('{% cycle a,b,c as foo %}{% cycle bar %}', {}, template.TemplateSyntaxError),
-            'cycle08': ('{% cycle a,b,c as foo %}{% cycle foo %}{{ foo }}{{ foo }}{% cycle foo %}{{ foo }}', {}, 'abbbcc'),
-
-            ### EXCEPTIONS ############################################################
-
-            # Raise exception for invalid template name
-            'exception01': ("{% extends 'nonexistent' %}", {}, template.TemplateSyntaxError),
-
-            # Raise exception for invalid template name (in variable)
-            'exception02': ("{% extends nonexistent %}", {}, template.TemplateSyntaxError),
-
-            # Raise exception for extra {% extends %} tags
-            'exception03': ("{% extends 'inheritance01' %}{% block first %}2{% endblock %}{% extends 'inheritance16' %}", {}, template.TemplateSyntaxError),
-
-            # Raise exception for custom tags used in child with {% load %} tag in parent, not in child
-            'exception04': ("{% extends 'inheritance17' %}{% block first %}{% echo 400 %}5678{% endblock %}", {}, template.TemplateSyntaxError),
-
-            ### FILTER TAG ############################################################
-            'filter01': ('{% filter upper %}{% endfilter %}', {}, ''),
-            'filter02': ('{% filter upper %}django{% endfilter %}', {}, 'DJANGO'),
-            'filter03': ('{% filter upper|lower %}django{% endfilter %}', {}, 'django'),
-
-            ### FIRSTOF TAG ###########################################################
-            'firstof01': ('{% firstof a b c %}', {'a':0,'b':0,'c':0}, ''),
-            'firstof02': ('{% firstof a b c %}', {'a':1,'b':0,'c':0}, '1'),
-            'firstof03': ('{% firstof a b c %}', {'a':0,'b':2,'c':0}, '2'),
-            'firstof04': ('{% firstof a b c %}', {'a':0,'b':0,'c':3}, '3'),
-            'firstof05': ('{% firstof a b c %}', {'a':1,'b':2,'c':3}, '1'),
-            'firstof06': ('{% firstof %}', {}, template.TemplateSyntaxError),
-
-            ### FOR TAG ###############################################################
-            'for-tag01': ("{% for val in values %}{{ val }}{% endfor %}", {"values": [1, 2, 3]}, "123"),
-            'for-tag02': ("{% for val in values reversed %}{{ val }}{% endfor %}", {"values": [1, 2, 3]}, "321"),
-            'for-tag-vars01': ("{% for val in values %}{{ forloop.counter }}{% endfor %}", {"values": [6, 6, 6]}, "123"),
-            'for-tag-vars02': ("{% for val in values %}{{ forloop.counter0 }}{% endfor %}", {"values": [6, 6, 6]}, "012"),
-            'for-tag-vars03': ("{% for val in values %}{{ forloop.revcounter }}{% endfor %}", {"values": [6, 6, 6]}, "321"),
-            'for-tag-vars04': ("{% for val in values %}{{ forloop.revcounter0 }}{% endfor %}", {"values": [6, 6, 6]}, "210"),
-
-            ### IF TAG ################################################################
-            'if-tag01': ("{% if foo %}yes{% else %}no{% endif %}", {"foo": True}, "yes"),
-            'if-tag02': ("{% if foo %}yes{% else %}no{% endif %}", {"foo": False}, "no"),
-            'if-tag03': ("{% if foo %}yes{% else %}no{% endif %}", {}, "no"),
-
-            # AND
-            'if-tag-and01': ("{% if foo and bar %}yes{% else %}no{% endif %}", {'foo': True, 'bar': True}, 'yes'),
-            'if-tag-and02': ("{% if foo and bar %}yes{% else %}no{% endif %}", {'foo': True, 'bar': False}, 'no'),
-            'if-tag-and03': ("{% if foo and bar %}yes{% else %}no{% endif %}", {'foo': False, 'bar': True}, 'no'),
-            'if-tag-and04': ("{% if foo and bar %}yes{% else %}no{% endif %}", {'foo': False, 'bar': False}, 'no'),
-            'if-tag-and05': ("{% if foo and bar %}yes{% else %}no{% endif %}", {'foo': False}, 'no'),
-            'if-tag-and06': ("{% if foo and bar %}yes{% else %}no{% endif %}", {'bar': False}, 'no'),
-            'if-tag-and07': ("{% if foo and bar %}yes{% else %}no{% endif %}", {'foo': True}, 'no'),
-            'if-tag-and08': ("{% if foo and bar %}yes{% else %}no{% endif %}", {'bar': True}, 'no'),
-
-            # OR
-            'if-tag-or01': ("{% if foo or bar %}yes{% else %}no{% endif %}", {'foo': True, 'bar': True}, 'yes'),
-            'if-tag-or02': ("{% if foo or bar %}yes{% else %}no{% endif %}", {'foo': True, 'bar': False}, 'yes'),
-            'if-tag-or03': ("{% if foo or bar %}yes{% else %}no{% endif %}", {'foo': False, 'bar': True}, 'yes'),
-            'if-tag-or04': ("{% if foo or bar %}yes{% else %}no{% endif %}", {'foo': False, 'bar': False}, 'no'),
-            'if-tag-or05': ("{% if foo or bar %}yes{% else %}no{% endif %}", {'foo': False}, 'no'),
-            'if-tag-or06': ("{% if foo or bar %}yes{% else %}no{% endif %}", {'bar': False}, 'no'),
-            'if-tag-or07': ("{% if foo or bar %}yes{% else %}no{% endif %}", {'foo': True}, 'yes'),
-            'if-tag-or08': ("{% if foo or bar %}yes{% else %}no{% endif %}", {'bar': True}, 'yes'),
-
-            # TODO: multiple ORs
-
-            # NOT
-            'if-tag-not01': ("{% if not foo %}no{% else %}yes{% endif %}", {'foo': True}, 'yes'),
-            'if-tag-not02': ("{% if not %}yes{% else %}no{% endif %}", {'foo': True}, 'no'),
-            'if-tag-not03': ("{% if not %}yes{% else %}no{% endif %}", {'not': True}, 'yes'),
-            'if-tag-not04': ("{% if not not %}no{% else %}yes{% endif %}", {'not': True}, 'yes'),
-            'if-tag-not05': ("{% if not not %}no{% else %}yes{% endif %}", {}, 'no'),
-
-            'if-tag-not06': ("{% if foo and not bar %}yes{% else %}no{% endif %}", {}, 'no'),
-            'if-tag-not07': ("{% if foo and not bar %}yes{% else %}no{% endif %}", {'foo': True, 'bar': True}, 'no'),
-            'if-tag-not08': ("{% if foo and not bar %}yes{% else %}no{% endif %}", {'foo': True, 'bar': False}, 'yes'),
-            'if-tag-not09': ("{% if foo and not bar %}yes{% else %}no{% endif %}", {'foo': False, 'bar': True}, 'no'),
-            'if-tag-not10': ("{% if foo and not bar %}yes{% else %}no{% endif %}", {'foo': False, 'bar': False}, 'no'),
-
-            'if-tag-not11': ("{% if not foo and bar %}yes{% else %}no{% endif %}", {}, 'no'),
-            'if-tag-not12': ("{% if not foo and bar %}yes{% else %}no{% endif %}", {'foo': True, 'bar': True}, 'no'),
-            'if-tag-not13': ("{% if not foo and bar %}yes{% else %}no{% endif %}", {'foo': True, 'bar': False}, 'no'),
-            'if-tag-not14': ("{% if not foo and bar %}yes{% else %}no{% endif %}", {'foo': False, 'bar': True}, 'yes'),
-            'if-tag-not15': ("{% if not foo and bar %}yes{% else %}no{% endif %}", {'foo': False, 'bar': False}, 'no'),
-
-            'if-tag-not16': ("{% if foo or not bar %}yes{% else %}no{% endif %}", {}, 'yes'),
-            'if-tag-not17': ("{% if foo or not bar %}yes{% else %}no{% endif %}", {'foo': True, 'bar': True}, 'yes'),
-            'if-tag-not18': ("{% if foo or not bar %}yes{% else %}no{% endif %}", {'foo': True, 'bar': False}, 'yes'),
-            'if-tag-not19': ("{% if foo or not bar %}yes{% else %}no{% endif %}", {'foo': False, 'bar': True}, 'no'),
-            'if-tag-not20': ("{% if foo or not bar %}yes{% else %}no{% endif %}", {'foo': False, 'bar': False}, 'yes'),
-
-            'if-tag-not21': ("{% if not foo or bar %}yes{% else %}no{% endif %}", {}, 'yes'),
-            'if-tag-not22': ("{% if not foo or bar %}yes{% else %}no{% endif %}", {'foo': True, 'bar': True}, 'yes'),
-            'if-tag-not23': ("{% if not foo or bar %}yes{% else %}no{% endif %}", {'foo': True, 'bar': False}, 'no'),
-            'if-tag-not24': ("{% if not foo or bar %}yes{% else %}no{% endif %}", {'foo': False, 'bar': True}, 'yes'),
-            'if-tag-not25': ("{% if not foo or bar %}yes{% else %}no{% endif %}", {'foo': False, 'bar': False}, 'yes'),
-
-            'if-tag-not26': ("{% if not foo and not bar %}yes{% else %}no{% endif %}", {}, 'yes'),
-            'if-tag-not27': ("{% if not foo and not bar %}yes{% else %}no{% endif %}", {'foo': True, 'bar': True}, 'no'),
-            'if-tag-not28': ("{% if not foo and not bar %}yes{% else %}no{% endif %}", {'foo': True, 'bar': False}, 'no'),
-            'if-tag-not29': ("{% if not foo and not bar %}yes{% else %}no{% endif %}", {'foo': False, 'bar': True}, 'no'),
-            'if-tag-not30': ("{% if not foo and not bar %}yes{% else %}no{% endif %}", {'foo': False, 'bar': False}, 'yes'),
-
-            'if-tag-not31': ("{% if not foo or not bar %}yes{% else %}no{% endif %}", {}, 'yes'),
-            'if-tag-not32': ("{% if not foo or not bar %}yes{% else %}no{% endif %}", {'foo': True, 'bar': True}, 'no'),
-            'if-tag-not33': ("{% if not foo or not bar %}yes{% else %}no{% endif %}", {'foo': True, 'bar': False}, 'yes'),
-            'if-tag-not34': ("{% if not foo or not bar %}yes{% else %}no{% endif %}", {'foo': False, 'bar': True}, 'yes'),
-            'if-tag-not35': ("{% if not foo or not bar %}yes{% else %}no{% endif %}", {'foo': False, 'bar': False}, 'yes'),
-
-            # AND and OR raises a TemplateSyntaxError
-            'if-tag-error01': ("{% if foo or bar and baz %}yes{% else %}no{% endif %}", {'foo': False, 'bar': False}, template.TemplateSyntaxError),
-            'if-tag-error02': ("{% if foo and %}yes{% else %}no{% endif %}", {'foo': True}, template.TemplateSyntaxError),
-            'if-tag-error03': ("{% if foo or %}yes{% else %}no{% endif %}", {'foo': True}, template.TemplateSyntaxError),
-            'if-tag-error04': ("{% if not foo and %}yes{% else %}no{% endif %}", {'foo': True}, template.TemplateSyntaxError),
-            'if-tag-error05': ("{% if not foo or %}yes{% else %}no{% endif %}", {'foo': True}, template.TemplateSyntaxError),
-
-            ### IFCHANGED TAG #########################################################
-            'ifchanged01': ('{% for n in num %}{% ifchanged %}{{ n }}{% endifchanged %}{% endfor %}', { 'num': (1,2,3) }, '123'),
-            'ifchanged02': ('{% for n in num %}{% ifchanged %}{{ n }}{% endifchanged %}{% endfor %}', { 'num': (1,1,3) }, '13'),
-            'ifchanged03': ('{% for n in num %}{% ifchanged %}{{ n }}{% endifchanged %}{% endfor %}', { 'num': (1,1,1) }, '1'),
-            'ifchanged04': ('{% for n in num %}{% ifchanged %}{{ n }}{% endifchanged %}{% for x in numx %}{% ifchanged %}{{ x }}{% endifchanged %}{% endfor %}{% endfor %}', { 'num': (1, 2, 3), 'numx': (2, 2, 2)}, '122232'),
-            'ifchanged05': ('{% for n in num %}{% ifchanged %}{{ n }}{% endifchanged %}{% for x in numx %}{% ifchanged %}{{ x }}{% endifchanged %}{% endfor %}{% endfor %}', { 'num': (1, 1, 1), 'numx': (1, 2, 3)}, '1123123123'),
-            'ifchanged06': ('{% for n in num %}{% ifchanged %}{{ n }}{% endifchanged %}{% for x in numx %}{% ifchanged %}{{ x }}{% endifchanged %}{% endfor %}{% endfor %}', { 'num': (1, 1, 1), 'numx': (2, 2, 2)}, '1222'),
-            'ifchanged07': ('{% for n in num %}{% ifchanged %}{{ n }}{% endifchanged %}{% for x in numx %}{% ifchanged %}{{ x }}{% endifchanged %}{% for y in numy %}{% ifchanged %}{{ y }}{% endifchanged %}{% endfor %}{% endfor %}{% endfor %}', { 'num': (1, 1, 1), 'numx': (2, 2, 2), 'numy': (3, 3, 3)}, '1233323332333'),
-
-            # Test one parameter given to ifchanged.
-            'ifchanged-param01': ('{% for n in num %}{% ifchanged n %}..{% endifchanged %}{{ n }}{% endfor %}', { 'num': (1,2,3) }, '..1..2..3'),
-            'ifchanged-param02': ('{% for n in num %}{% for x in numx %}{% ifchanged n %}..{% endifchanged %}{{ x }}{% endfor %}{% endfor %}', { 'num': (1,2,3), 'numx': (5,6,7) }, '..567..567..567'),
-
-            # Test multiple parameters to ifchanged.
-            'ifchanged-param03': ('{% for n in num %}{{ n }}{% for x in numx %}{% ifchanged x n %}{{ x }}{% endifchanged %}{% endfor %}{% endfor %}', { 'num': (1,1,2), 'numx': (5,6,6) }, '156156256'),
-
-            # Test a date+hour like construct, where the hour of the last day
-            # is the same but the date had changed, so print the hour anyway.
-            'ifchanged-param04': ('{% for d in days %}{% ifchanged %}{{ d.day }}{% endifchanged %}{% for h in d.hours %}{% ifchanged d h %}{{ h }}{% endifchanged %}{% endfor %}{% endfor %}', {'days':[{'day':1, 'hours':[1,2,3]},{'day':2, 'hours':[3]},] }, '112323'),
-
-            # Logically the same as above, just written with explicit
-            # ifchanged for the day.
-            'ifchanged-param04': ('{% for d in days %}{% ifchanged d.day %}{{ d.day }}{% endifchanged %}{% for h in d.hours %}{% ifchanged d.day h %}{{ h }}{% endifchanged %}{% endfor %}{% endfor %}', {'days':[{'day':1, 'hours':[1,2,3]},{'day':2, 'hours':[3]},] }, '112323'),
-
-            ### IFEQUAL TAG ###########################################################
-            'ifequal01': ("{% ifequal a b %}yes{% endifequal %}", {"a": 1, "b": 2}, ""),
-            'ifequal02': ("{% ifequal a b %}yes{% endifequal %}", {"a": 1, "b": 1}, "yes"),
-            'ifequal03': ("{% ifequal a b %}yes{% else %}no{% endifequal %}", {"a": 1, "b": 2}, "no"),
-            'ifequal04': ("{% ifequal a b %}yes{% else %}no{% endifequal %}", {"a": 1, "b": 1}, "yes"),
-            'ifequal05': ("{% ifequal a 'test' %}yes{% else %}no{% endifequal %}", {"a": "test"}, "yes"),
-            'ifequal06': ("{% ifequal a 'test' %}yes{% else %}no{% endifequal %}", {"a": "no"}, "no"),
-            'ifequal07': ('{% ifequal a "test" %}yes{% else %}no{% endifequal %}', {"a": "test"}, "yes"),
-            'ifequal08': ('{% ifequal a "test" %}yes{% else %}no{% endifequal %}', {"a": "no"}, "no"),
-            'ifequal09': ('{% ifequal a "test" %}yes{% else %}no{% endifequal %}', {}, "no"),
-            'ifequal10': ('{% ifequal a b %}yes{% else %}no{% endifequal %}', {}, "yes"),
-
-            # SMART SPLITTING
-            'ifequal-split01': ('{% ifequal a "test man" %}yes{% else %}no{% endifequal %}', {}, "no"),
-            'ifequal-split02': ('{% ifequal a "test man" %}yes{% else %}no{% endifequal %}', {'a': 'foo'}, "no"),
-            'ifequal-split03': ('{% ifequal a "test man" %}yes{% else %}no{% endifequal %}', {'a': 'test man'}, "yes"),
-            'ifequal-split04': ("{% ifequal a 'test man' %}yes{% else %}no{% endifequal %}", {'a': 'test man'}, "yes"),
-            'ifequal-split05': ("{% ifequal a 'i \"love\" you' %}yes{% else %}no{% endifequal %}", {'a': ''}, "no"),
-            'ifequal-split06': ("{% ifequal a 'i \"love\" you' %}yes{% else %}no{% endifequal %}", {'a': 'i "love" you'}, "yes"),
-            'ifequal-split07': ("{% ifequal a 'i \"love\" you' %}yes{% else %}no{% endifequal %}", {'a': 'i love you'}, "no"),
-            'ifequal-split08': (r"{% ifequal a 'I\'m happy' %}yes{% else %}no{% endifequal %}", {'a': "I'm happy"}, "yes"),
-            'ifequal-split09': (r"{% ifequal a 'slash\man' %}yes{% else %}no{% endifequal %}", {'a': r"slash\man"}, "yes"),
-            'ifequal-split10': (r"{% ifequal a 'slash\man' %}yes{% else %}no{% endifequal %}", {'a': r"slashman"}, "no"),
-
-            # NUMERIC RESOLUTION
-            'ifequal-numeric01': ('{% ifequal x 5 %}yes{% endifequal %}', {'x': '5'}, ''),
-            'ifequal-numeric02': ('{% ifequal x 5 %}yes{% endifequal %}', {'x': 5}, 'yes'),
-            'ifequal-numeric03': ('{% ifequal x 5.2 %}yes{% endifequal %}', {'x': 5}, ''),
-            'ifequal-numeric04': ('{% ifequal x 5.2 %}yes{% endifequal %}', {'x': 5.2}, 'yes'),
-            'ifequal-numeric05': ('{% ifequal x 0.2 %}yes{% endifequal %}', {'x': .2}, 'yes'),
-            'ifequal-numeric06': ('{% ifequal x .2 %}yes{% endifequal %}', {'x': .2}, 'yes'),
-            'ifequal-numeric07': ('{% ifequal x 2. %}yes{% endifequal %}', {'x': 2}, ''),
-            'ifequal-numeric08': ('{% ifequal x "5" %}yes{% endifequal %}', {'x': 5}, ''),
-            'ifequal-numeric09': ('{% ifequal x "5" %}yes{% endifequal %}', {'x': '5'}, 'yes'),
-            'ifequal-numeric10': ('{% ifequal x -5 %}yes{% endifequal %}', {'x': -5}, 'yes'),
-            'ifequal-numeric11': ('{% ifequal x -5.2 %}yes{% endifequal %}', {'x': -5.2}, 'yes'),
-            'ifequal-numeric12': ('{% ifequal x +5 %}yes{% endifequal %}', {'x': 5}, 'yes'),
-
-            ### IFNOTEQUAL TAG ########################################################
-            'ifnotequal01': ("{% ifnotequal a b %}yes{% endifnotequal %}", {"a": 1, "b": 2}, "yes"),
-            'ifnotequal02': ("{% ifnotequal a b %}yes{% endifnotequal %}", {"a": 1, "b": 1}, ""),
-            'ifnotequal03': ("{% ifnotequal a b %}yes{% else %}no{% endifnotequal %}", {"a": 1, "b": 2}, "yes"),
-            'ifnotequal04': ("{% ifnotequal a b %}yes{% else %}no{% endifnotequal %}", {"a": 1, "b": 1}, "no"),
-
-            ### INCLUDE TAG ###########################################################
-            'include01': ('{% include "basic-syntax01" %}', {}, "something cool"),
-            'include02': ('{% include "basic-syntax02" %}', {'headline': 'Included'}, "Included"),
-            'include03': ('{% include template_name %}', {'template_name': 'basic-syntax02', 'headline': 'Included'}, "Included"),
-            'include04': ('a{% include "nonexistent" %}b', {}, "ab"),
-
-            ### NAMED ENDBLOCKS #######################################################
-
-            # Basic test
-            'namedendblocks01': ("1{% block first %}_{% block second %}2{% endblock second %}_{% endblock first %}3", {}, '1_2_3'),
-
-            # Unbalanced blocks
-            'namedendblocks02': ("1{% block first %}_{% block second %}2{% endblock first %}_{% endblock second %}3", {}, template.TemplateSyntaxError),
-            'namedendblocks03': ("1{% block first %}_{% block second %}2{% endblock %}_{% endblock second %}3", {}, template.TemplateSyntaxError),
-            'namedendblocks04': ("1{% block first %}_{% block second %}2{% endblock second %}_{% endblock third %}3", {}, template.TemplateSyntaxError),
-            'namedendblocks05': ("1{% block first %}_{% block second %}2{% endblock first %}", {}, template.TemplateSyntaxError),
-
-            # Mixed named and unnamed endblocks
-            'namedendblocks06': ("1{% block first %}_{% block second %}2{% endblock %}_{% endblock first %}3", {}, '1_2_3'),
-            'namedendblocks07': ("1{% block first %}_{% block second %}2{% endblock second %}_{% endblock %}3", {}, '1_2_3'),
-
-            ### INHERITANCE ###########################################################
-
-            # Standard template with no inheritance
-            'inheritance01': ("1{% block first %}_{% endblock %}3{% block second %}_{% endblock %}", {}, '1_3_'),
-
-            # Standard two-level inheritance
-            'inheritance02': ("{% extends 'inheritance01' %}{% block first %}2{% endblock %}{% block second %}4{% endblock %}", {}, '1234'),
-
-            # Three-level with no redefinitions on third level
-            'inheritance03': ("{% extends 'inheritance02' %}", {}, '1234'),
-
-            # Two-level with no redefinitions on second level
-            'inheritance04': ("{% extends 'inheritance01' %}", {}, '1_3_'),
-
-            # Two-level with double quotes instead of single quotes
-            'inheritance05': ('{% extends "inheritance02" %}', {}, '1234'),
-
-            # Three-level with variable parent-template name
-            'inheritance06': ("{% extends foo %}", {'foo': 'inheritance02'}, '1234'),
-
-            # Two-level with one block defined, one block not defined
-            'inheritance07': ("{% extends 'inheritance01' %}{% block second %}5{% endblock %}", {}, '1_35'),
-
-            # Three-level with one block defined on this level, two blocks defined next level
-            'inheritance08': ("{% extends 'inheritance02' %}{% block second %}5{% endblock %}", {}, '1235'),
-
-            # Three-level with second and third levels blank
-            'inheritance09': ("{% extends 'inheritance04' %}", {}, '1_3_'),
-
-            # Three-level with space NOT in a block -- should be ignored
-            'inheritance10': ("{% extends 'inheritance04' %}      ", {}, '1_3_'),
-
-            # Three-level with both blocks defined on this level, but none on second level
-            'inheritance11': ("{% extends 'inheritance04' %}{% block first %}2{% endblock %}{% block second %}4{% endblock %}", {}, '1234'),
-
-            # Three-level with this level providing one and second level providing the other
-            'inheritance12': ("{% extends 'inheritance07' %}{% block first %}2{% endblock %}", {}, '1235'),
-
-            # Three-level with this level overriding second level
-            'inheritance13': ("{% extends 'inheritance02' %}{% block first %}a{% endblock %}{% block second %}b{% endblock %}", {}, '1a3b'),
-
-            # A block defined only in a child template shouldn't be displayed
-            'inheritance14': ("{% extends 'inheritance01' %}{% block newblock %}NO DISPLAY{% endblock %}", {}, '1_3_'),
-
-            # A block within another block
-            'inheritance15': ("{% extends 'inheritance01' %}{% block first %}2{% block inner %}inner{% endblock %}{% endblock %}", {}, '12inner3_'),
-
-            # A block within another block (level 2)
-            'inheritance16': ("{% extends 'inheritance15' %}{% block inner %}out{% endblock %}", {}, '12out3_'),
-
-            # {% load %} tag (parent -- setup for exception04)
-            'inheritance17': ("{% load testtags %}{% block first %}1234{% endblock %}", {}, '1234'),
-
-            # {% load %} tag (standard usage, without inheritance)
-            'inheritance18': ("{% load testtags %}{% echo this that theother %}5678", {}, 'this that theother5678'),
-
-            # {% load %} tag (within a child template)
-            'inheritance19': ("{% extends 'inheritance01' %}{% block first %}{% load testtags %}{% echo 400 %}5678{% endblock %}", {}, '140056783_'),
-
-            # Two-level inheritance with {{ block.super }}
-            'inheritance20': ("{% extends 'inheritance01' %}{% block first %}{{ block.super }}a{% endblock %}", {}, '1_a3_'),
-
-            # Three-level inheritance with {{ block.super }} from parent
-            'inheritance21': ("{% extends 'inheritance02' %}{% block first %}{{ block.super }}a{% endblock %}", {}, '12a34'),
-
-            # Three-level inheritance with {{ block.super }} from grandparent
-            'inheritance22': ("{% extends 'inheritance04' %}{% block first %}{{ block.super }}a{% endblock %}", {}, '1_a3_'),
-
-            # Three-level inheritance with {{ block.super }} from parent and grandparent
-            'inheritance23': ("{% extends 'inheritance20' %}{% block first %}{{ block.super }}b{% endblock %}", {}, '1_ab3_'),
-
-            # Inheritance from local context without use of template loader
-            'inheritance24': ("{% extends context_template %}{% block first %}2{% endblock %}{% block second %}4{% endblock %}", {'context_template': template.Template("1{% block first %}_{% endblock %}3{% block second %}_{% endblock %}")}, '1234'),
-
-            # Inheritance from local context with variable parent template
-            'inheritance25': ("{% extends context_template.1 %}{% block first %}2{% endblock %}{% block second %}4{% endblock %}", {'context_template': [template.Template("Wrong"), template.Template("1{% block first %}_{% endblock %}3{% block second %}_{% endblock %}")]}, '1234'),
-
-            ### I18N ##################################################################
-
-            # {% spaceless %} tag
-            'spaceless01': ("{% spaceless %} <b>    <i> text </i>    </b> {% endspaceless %}", {}, "<b> <i> text </i> </b>"),
-            'spaceless02': ("{% spaceless %} <b> \n <i> text </i> \n </b> {% endspaceless %}", {}, "<b> <i> text </i> </b>"),
-            'spaceless03': ("{% spaceless %}<b><i>text</i></b>{% endspaceless %}", {}, "<b><i>text</i></b>"),
-
-            # simple translation of a string delimited by '
-            'i18n01': ("{% load i18n %}{% trans 'xxxyyyxxx' %}", {}, "xxxyyyxxx"),
-
-            # simple translation of a string delimited by "
-            'i18n02': ('{% load i18n %}{% trans "xxxyyyxxx" %}', {}, "xxxyyyxxx"),
-
-            # simple translation of a variable
-            'i18n03': ('{% load i18n %}{% blocktrans %}{{ anton }}{% endblocktrans %}', {'anton': 'xxxyyyxxx'}, "xxxyyyxxx"),
-
-            # simple translation of a variable and filter
-            'i18n04': ('{% load i18n %}{% blocktrans with anton|lower as berta %}{{ berta }}{% endblocktrans %}', {'anton': 'XXXYYYXXX'}, "xxxyyyxxx"),
-
-            # simple translation of a string with interpolation
-            'i18n05': ('{% load i18n %}{% blocktrans %}xxx{{ anton }}xxx{% endblocktrans %}', {'anton': 'yyy'}, "xxxyyyxxx"),
-
-            # simple translation of a string to german
-            'i18n06': ('{% load i18n %}{% trans "Page not found" %}', {'LANGUAGE_CODE': 'de'}, "Seite nicht gefunden"),
-
-            # translation of singular form
-            'i18n07': ('{% load i18n %}{% blocktrans count number as counter %}singular{% plural %}plural{% endblocktrans %}', {'number': 1}, "singular"),
-
-            # translation of plural form
-            'i18n08': ('{% load i18n %}{% blocktrans count number as counter %}singular{% plural %}plural{% endblocktrans %}', {'number': 2}, "plural"),
-
-            # simple non-translation (only marking) of a string to german
-            'i18n09': ('{% load i18n %}{% trans "Page not found" noop %}', {'LANGUAGE_CODE': 'de'}, "Page not found"),
-
-            # translation of a variable with a translated filter
-            'i18n10': ('{{ bool|yesno:_("ja,nein") }}', {'bool': True}, 'ja'),
-
-            # translation of a variable with a non-translated filter
-            'i18n11': ('{{ bool|yesno:"ja,nein" }}', {'bool': True}, 'ja'),
-
-            # usage of the get_available_languages tag
-            'i18n12': ('{% load i18n %}{% get_available_languages as langs %}{% for lang in langs %}{% ifequal lang.0 "de" %}{{ lang.0 }}{% endifequal %}{% endfor %}', {}, 'de'),
-
-            # translation of a constant string
-            'i18n13': ('{{ _("Page not found") }}', {'LANGUAGE_CODE': 'de'}, 'Seite nicht gefunden'),
-
-            ### HANDLING OF TEMPLATE_TAG_IF_INVALID ###################################
-
-            'invalidstr01': ('{{ var|default:"Foo" }}', {}, ('Foo','INVALID')),
-            'invalidstr02': ('{{ var|default_if_none:"Foo" }}', {}, ('','INVALID')),
-            'invalidstr03': ('{% for v in var %}({{ v }}){% endfor %}', {}, ''),
-            'invalidstr04': ('{% if var %}Yes{% else %}No{% endif %}', {}, 'No'),
-            'invalidstr04': ('{% if var|default:"Foo" %}Yes{% else %}No{% endif %}', {}, 'Yes'),
-
-            ### MULTILINE #############################################################
-
-            'multiline01': ("""
-                            Hello,
-                            boys.
-                            How
-                            are
-                            you
-                            gentlemen.
-                            """,
-                            {},
-                            """
-                            Hello,
-                            boys.
-                            How
-                            are
-                            you
-                            gentlemen.
-                            """),
-
-            ### REGROUP TAG ###########################################################
-            'regroup01': ('{% regroup data by bar as grouped %}' + \
-                          '{% for group in grouped %}' + \
-                          '{{ group.grouper }}:' + \
-                          '{% for item in group.list %}' + \
-                          '{{ item.foo }}' + \
-                          '{% endfor %},' + \
-                          '{% endfor %}',
-                          {'data': [ {'foo':'c', 'bar':1},
-                                     {'foo':'d', 'bar':1},
-                                     {'foo':'a', 'bar':2},
-                                     {'foo':'b', 'bar':2},
-                                     {'foo':'x', 'bar':3}  ]},
-                          '1:cd,2:ab,3:x,'),
-
-            # Test for silent failure when target variable isn't found
-            'regroup02': ('{% regroup data by bar as grouped %}' + \
-                          '{% for group in grouped %}' + \
-                          '{{ group.grouper }}:' + \
-                          '{% for item in group.list %}' + \
-                          '{{ item.foo }}' + \
-                          '{% endfor %},' + \
-                          '{% endfor %}',
-                          {}, ''),
-
-            ### TEMPLATETAG TAG #######################################################
-            'templatetag01': ('{% templatetag openblock %}', {}, '{%'),
-            'templatetag02': ('{% templatetag closeblock %}', {}, '%}'),
-            'templatetag03': ('{% templatetag openvariable %}', {}, '{{'),
-            'templatetag04': ('{% templatetag closevariable %}', {}, '}}'),
-            'templatetag05': ('{% templatetag %}', {}, template.TemplateSyntaxError),
-            'templatetag06': ('{% templatetag foo %}', {}, template.TemplateSyntaxError),
-            'templatetag07': ('{% templatetag openbrace %}', {}, '{'),
-            'templatetag08': ('{% templatetag closebrace %}', {}, '}'),
-            'templatetag09': ('{% templatetag openbrace %}{% templatetag openbrace %}', {}, '{{'),
-            'templatetag10': ('{% templatetag closebrace %}{% templatetag closebrace %}', {}, '}}'),
-            'templatetag11': ('{% templatetag opencomment %}', {}, '{#'),
-            'templatetag12': ('{% templatetag closecomment %}', {}, '#}'),
-
-            ### WIDTHRATIO TAG ########################################################
-            'widthratio01': ('{% widthratio a b 0 %}', {'a':50,'b':100}, '0'),
-            'widthratio02': ('{% widthratio a b 100 %}', {'a':0,'b':0}, ''),
-            'widthratio03': ('{% widthratio a b 100 %}', {'a':0,'b':100}, '0'),
-            'widthratio04': ('{% widthratio a b 100 %}', {'a':50,'b':100}, '50'),
-            'widthratio05': ('{% widthratio a b 100 %}', {'a':100,'b':100}, '100'),
-
-            # 62.5 should round to 63
-            'widthratio06': ('{% widthratio a b 100 %}', {'a':50,'b':80}, '63'),
-
-            # 71.4 should round to 71
-            'widthratio07': ('{% widthratio a b 100 %}', {'a':50,'b':70}, '71'),
-
-            # Raise exception if we don't have 3 args, last one an integer
-            'widthratio08': ('{% widthratio %}', {}, template.TemplateSyntaxError),
-            'widthratio09': ('{% widthratio a b %}', {'a':50,'b':100}, template.TemplateSyntaxError),
-            'widthratio10': ('{% widthratio a b 100.0 %}', {'a':50,'b':100}, template.TemplateSyntaxError),
-
-            ### NOW TAG ########################################################
-            # Simple case
-            'now01' : ('{% now "j n Y"%}', {}, str(datetime.now().day) + ' ' + str(datetime.now().month) + ' ' + str(datetime.now().year)),
-
-            # Check parsing of escaped and special characters
-            'now02' : ('{% now "j "n" Y"%}', {}, template.TemplateSyntaxError),
-        #    'now03' : ('{% now "j \"n\" Y"%}', {}, str(datetime.now().day) + '"' + str(datetime.now().month) + '"' + str(datetime.now().year)),
-        #    'now04' : ('{% now "j \nn\n Y"%}', {}, str(datetime.now().day) + '\n' + str(datetime.now().month) + '\n' + str(datetime.now().year))
-
-            ### TIMESINCE TAG ##################################################
-            # Default compare with datetime.now()
-            'timesince01' : ('{{ a|timesince }}', {'a':datetime.now() + timedelta(minutes=-1, seconds = -10)}, '1 minute'),
-            'timesince02' : ('{{ a|timesince }}', {'a':(datetime.now() - timedelta(days=1, minutes = 1))}, '1 day'),
-            'timesince03' : ('{{ a|timesince }}', {'a':(datetime.now() -
-                timedelta(hours=1, minutes=25, seconds = 10))}, '1 hour, 25 minutes'),
-
-            # Compare to a given parameter
-            'timesince04' : ('{{ a|timesince:b }}', {'a':NOW + timedelta(days=2), 'b':NOW + timedelta(days=1)}, '1 day'),
-            'timesince05' : ('{{ a|timesince:b }}', {'a':NOW + timedelta(days=2, minutes=1), 'b':NOW + timedelta(days=2)}, '1 minute'),
-
-            # Check that timezone is respected
-            'timesince06' : ('{{ a|timesince:b }}', {'a':NOW_tz + timedelta(hours=8), 'b':NOW_tz}, '8 hours'),
-
-            ### TIMEUNTIL TAG ##################################################
-            # Default compare with datetime.now()
-            'timeuntil01' : ('{{ a|timeuntil }}', {'a':datetime.now() + timedelta(minutes=2, seconds = 10)}, '2 minutes'),
-            'timeuntil02' : ('{{ a|timeuntil }}', {'a':(datetime.now() + timedelta(days=1, seconds = 10))}, '1 day'),
-            'timeuntil03' : ('{{ a|timeuntil }}', {'a':(datetime.now() + timedelta(hours=8, minutes=10, seconds = 10))}, '8 hours, 10 minutes'),
-
-            # Compare to a given parameter
-            'timeuntil04' : ('{{ a|timeuntil:b }}', {'a':NOW - timedelta(days=1), 'b':NOW - timedelta(days=2)}, '1 day'),
-            'timeuntil05' : ('{{ a|timeuntil:b }}', {'a':NOW - timedelta(days=2), 'b':NOW - timedelta(days=2, minutes=1)}, '1 minute'),
-
-            ### URL TAG ########################################################
-            # Successes
-            'url01' : ('{% url regressiontests.templates.views.client client.id %}', {'client': {'id': 1}}, '/url_tag/client/1/'),
-            'url02' : ('{% url regressiontests.templates.views.client_action client.id,action="update" %}', {'client': {'id': 1}}, '/url_tag/client/1/update/'),
-            'url03' : ('{% url regressiontests.templates.views.index %}', {}, '/url_tag/'),
-
-            # Failures
-            'url04' : ('{% url %}', {}, template.TemplateSyntaxError),
-            'url05' : ('{% url no_such_view %}', {}, ''),
-            'url06' : ('{% url regressiontests.templates.views.client no_such_param="value" %}', {}, ''),
-        }
-
-        # Register our custom template loader.
-        def test_template_loader(template_name, template_dirs=None):
-            "A custom template loader that loads the unit-test templates."
-            try:
-                return (TEMPLATE_TESTS[template_name][0] , "test:%s" % template_name)
-            except KeyError:
-                raise template.TemplateDoesNotExist, template_name
-
-        old_template_loaders = loader.template_source_loaders
-        loader.template_source_loaders = [test_template_loader]
-
-        failures = []
-        tests = TEMPLATE_TESTS.items()
-        tests.sort()
-
-        # Turn TEMPLATE_DEBUG off, because tests assume that.
-        old_td, settings.TEMPLATE_DEBUG = settings.TEMPLATE_DEBUG, False
-
-        # Set TEMPLATE_STRING_IF_INVALID to a known string
-        old_invalid = settings.TEMPLATE_STRING_IF_INVALID
-
-        for name, vals in tests:
-            install()
-
-            if isinstance(vals[2], tuple):
-                normal_string_result = vals[2][0]
-                invalid_string_result = vals[2][1]
-            else:
-                normal_string_result = vals[2]
-                invalid_string_result = vals[2]
-
-            if 'LANGUAGE_CODE' in vals[1]:
-                activate(vals[1]['LANGUAGE_CODE'])
-            else:
-                activate('en-us')
-
-            for invalid_str, result in [('', normal_string_result),
-                                        ('INVALID', invalid_string_result)]:
-                settings.TEMPLATE_STRING_IF_INVALID = invalid_str
-                try:
-                    output = loader.get_template(name).render(template.Context(vals[1]))
-                except Exception, e:
-                    if e.__class__ != result:
-                        failures.append("Template test (TEMPLATE_STRING_IF_INVALID='%s'): %s -- FAILED. Got %s, exception: %s" % (invalid_str, name, e.__class__, e))
-                    continue
-                if output != result:
-                    failures.append("Template test (TEMPLATE_STRING_IF_INVALID='%s'): %s -- FAILED. Expected %r, got %r" % (invalid_str, name, result, output))
-
-            if 'LANGUAGE_CODE' in vals[1]:
-                deactivate()
-
-        loader.template_source_loaders = old_template_loaders
-        deactivate()
-        settings.TEMPLATE_DEBUG = old_td
-        settings.TEMPLATE_STRING_IF_INVALID = old_invalid
-
-        self.assertEqual(failures, [], '\n'.join(failures))
-
-if __name__ == "__main__":
-    unittest.main()
--- a/thirdparty/google_appengine/lib/django/tests/regressiontests/templates/urls.py	Mon Sep 07 20:26:39 2009 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,10 +0,0 @@
-from django.conf.urls.defaults import *
-from regressiontests.templates import views
-
-urlpatterns = patterns('',
-
-    # Test urls for testing reverse lookups
-    (r'^$', views.index),
-    (r'^client/(\d+)/$', views.client),
-    (r'^client/(\d+)/(?P<action>[^/]+)/$', views.client_action),
-)
--- a/thirdparty/google_appengine/lib/django/tests/regressiontests/templates/views.py	Mon Sep 07 20:26:39 2009 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,10 +0,0 @@
-# Fake views for testing url reverse lookup
-
-def index(request):
-    pass
-
-def client(request, id):
-    pass
-
-def client_action(request, id, action):
-    pass
--- a/thirdparty/google_appengine/lib/django/tests/regressiontests/urlpatterns_reverse/tests.py	Mon Sep 07 20:26:39 2009 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,39 +0,0 @@
-"Unit tests for reverse URL lookup"
-
-from django.core.urlresolvers import reverse_helper, NoReverseMatch
-import re, unittest
-
-test_data = (
-    ('^places/(\d+)/$', 'places/3/', [3], {}),
-    ('^places/(\d+)/$', 'places/3/', ['3'], {}),
-    ('^places/(\d+)/$', NoReverseMatch, ['a'], {}),
-    ('^places/(\d+)/$', NoReverseMatch, [], {}),
-    ('^places/(?P<id>\d+)/$', 'places/3/', [], {'id': 3}),
-    ('^people/(?P<name>\w+)/$', 'people/adrian/', ['adrian'], {}),
-    ('^people/(?P<name>\w+)/$', 'people/adrian/', [], {'name': 'adrian'}),
-    ('^people/(?P<name>\w+)/$', NoReverseMatch, ['name with spaces'], {}),
-    ('^people/(?P<name>\w+)/$', NoReverseMatch, [], {'name': 'name with spaces'}),
-    ('^people/(?P<name>\w+)/$', NoReverseMatch, [], {}),
-    ('^hardcoded/$', 'hardcoded/', [], {}),
-    ('^hardcoded/$', 'hardcoded/', ['any arg'], {}),
-    ('^hardcoded/$', 'hardcoded/', [], {'kwarg': 'foo'}),
-    ('^people/(?P<state>\w\w)/(?P<name>\w+)/$', 'people/il/adrian/', [], {'state': 'il', 'name': 'adrian'}),
-    ('^people/(?P<state>\w\w)/(?P<name>\d)/$', NoReverseMatch, [], {'state': 'il', 'name': 'adrian'}),
-    ('^people/(?P<state>\w\w)/(?P<name>\w+)/$', NoReverseMatch, [], {'state': 'il'}),
-    ('^people/(?P<state>\w\w)/(?P<name>\w+)/$', NoReverseMatch, [], {'name': 'adrian'}),
-    ('^people/(?P<state>\w\w)/(\w+)/$', NoReverseMatch, ['il'], {'name': 'adrian'}),
-    ('^people/(?P<state>\w\w)/(\w+)/$', 'people/il/adrian/', ['adrian'], {'state': 'il'}),
-)
-
-class URLPatternReverse(unittest.TestCase):
-    def test_urlpattern_reverse(self):
-        for regex, expected, args, kwargs in test_data:
-            try:
-                got = reverse_helper(re.compile(regex), *args, **kwargs)
-            except NoReverseMatch, e:
-                self.assertEqual(expected, NoReverseMatch)
-            else:
-                self.assertEquals(got, expected)
-
-if __name__ == "__main__":
-    run_tests(1)
--- a/thirdparty/google_appengine/lib/django/tests/runtests.py	Mon Sep 07 20:26:39 2009 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,154 +0,0 @@
-#!/usr/bin/env python
-
-import os, sys, traceback
-import unittest
-
-MODEL_TESTS_DIR_NAME = 'modeltests'
-REGRESSION_TESTS_DIR_NAME = 'regressiontests'
-TEST_DATABASE_NAME = 'django_test_db'
-TEST_TEMPLATE_DIR = 'templates'
-
-MODEL_TEST_DIR = os.path.join(os.path.dirname(__file__), MODEL_TESTS_DIR_NAME)
-REGRESSION_TEST_DIR = os.path.join(os.path.dirname(__file__), REGRESSION_TESTS_DIR_NAME)
-
-ALWAYS_INSTALLED_APPS = [
-    'django.contrib.contenttypes',
-    'django.contrib.auth',
-    'django.contrib.sites',
-    'django.contrib.flatpages',
-    'django.contrib.redirects',
-    'django.contrib.sessions',
-    'django.contrib.comments',
-    'django.contrib.admin',
-]
-
-def get_test_models():
-    models = []
-    for loc, dirpath in (MODEL_TESTS_DIR_NAME, MODEL_TEST_DIR), (REGRESSION_TESTS_DIR_NAME, REGRESSION_TEST_DIR):
-        for f in os.listdir(dirpath):
-            if f.startswith('__init__') or f.startswith('.') or f.startswith('sql') or f.startswith('invalid'):
-                continue
-            models.append((loc, f))
-    return models
-
-def get_invalid_models():
-    models = []
-    for loc, dirpath in (MODEL_TESTS_DIR_NAME, MODEL_TEST_DIR), (REGRESSION_TESTS_DIR_NAME, REGRESSION_TEST_DIR):
-        for f in os.listdir(dirpath):
-            if f.startswith('__init__') or f.startswith('.') or f.startswith('sql'):
-                continue
-            if f.startswith('invalid'):
-                models.append((loc, f))
-    return models
-
-class InvalidModelTestCase(unittest.TestCase):
-    def __init__(self, model_label):
-        unittest.TestCase.__init__(self)
-        self.model_label = model_label
-
-    def runTest(self):
-        from django.core import management
-        from django.db.models.loading import load_app
-        from cStringIO import StringIO
-
-        try:
-            module = load_app(self.model_label)
-        except Exception, e:
-            self.fail('Unable to load invalid model module')
-
-        s = StringIO()
-        count = management.get_validation_errors(s, module)
-        s.seek(0)
-        error_log = s.read()
-        actual = error_log.split('\n')
-        expected = module.model_errors.split('\n')
-
-        unexpected = [err for err in actual if err not in expected]
-        missing = [err for err in expected if err not in actual]
-
-        self.assert_(not unexpected, "Unexpected Errors: " + '\n'.join(unexpected))
-        self.assert_(not missing, "Missing Errors: " + '\n'.join(missing))
-
-def django_tests(verbosity, tests_to_run):
-    from django.conf import settings
-
-    old_installed_apps = settings.INSTALLED_APPS
-    old_test_database_name = settings.TEST_DATABASE_NAME
-    old_root_urlconf = settings.ROOT_URLCONF
-    old_template_dirs = settings.TEMPLATE_DIRS
-    old_use_i18n = settings.USE_I18N
-    old_middleware_classes = settings.MIDDLEWARE_CLASSES
-
-    # Redirect some settings for the duration of these tests.
-    settings.TEST_DATABASE_NAME = TEST_DATABASE_NAME
-    settings.INSTALLED_APPS = ALWAYS_INSTALLED_APPS
-    settings.ROOT_URLCONF = 'urls'
-    settings.TEMPLATE_DIRS = (os.path.join(os.path.dirname(__file__), TEST_TEMPLATE_DIR),)
-    settings.USE_I18N = True
-    settings.MIDDLEWARE_CLASSES = (
-        'django.contrib.sessions.middleware.SessionMiddleware',
-        'django.contrib.auth.middleware.AuthenticationMiddleware',
-        'django.middleware.common.CommonMiddleware',
-    )
-
-    # Load all the ALWAYS_INSTALLED_APPS.
-    # (This import statement is intentionally delayed until after we
-    # access settings because of the USE_I18N dependency.)
-    from django.db.models.loading import get_apps, load_app
-    get_apps()
-
-    # Load all the test model apps.
-    test_models = []
-    for model_dir, model_name in get_test_models():
-        model_label = '.'.join([model_dir, model_name])
-        try:
-            # if the model was named on the command line, or
-            # no models were named (i.e., run all), import
-            # this model and add it to the list to test.
-            if not tests_to_run or model_name in tests_to_run:
-                if verbosity >= 1:
-                    print "Importing model %s" % model_name
-                mod = load_app(model_label)
-                settings.INSTALLED_APPS.append(model_label)
-                test_models.append(mod)
-        except Exception, e:
-            sys.stderr.write("Error while importing %s:" % model_name + ''.join(traceback.format_exception(*sys.exc_info())[1:]))
-            continue
-
-    # Add tests for invalid models.
-    extra_tests = []
-    for model_dir, model_name in get_invalid_models():
-        model_label = '.'.join([model_dir, model_name])
-        if not tests_to_run or model_name in tests_to_run:
-            extra_tests.append(InvalidModelTestCase(model_label))
-
-    # Run the test suite, including the extra validation tests.
-    from django.test.simple import run_tests
-    failures = run_tests(test_models, verbosity, extra_tests=extra_tests)
-    if failures:
-        sys.exit(failures)
-
-    # Restore the old settings.
-    settings.INSTALLED_APPS = old_installed_apps
-    settings.TESTS_DATABASE_NAME = old_test_database_name
-    settings.ROOT_URLCONF = old_root_urlconf
-    settings.TEMPLATE_DIRS = old_template_dirs
-    settings.USE_I18N = old_use_i18n
-    settings.MIDDLEWARE_CLASSES = old_middleware_classes
-
-if __name__ == "__main__":
-    from optparse import OptionParser
-    usage = "%prog [options] [model model model ...]"
-    parser = OptionParser(usage=usage)
-    parser.add_option('-v','--verbosity', action='store', dest='verbosity', default='0',
-        type='choice', choices=['0', '1', '2'],
-        help='Verbosity level; 0=minimal output, 1=normal output, 2=all output')
-    parser.add_option('--settings',
-        help='Python path to settings module, e.g. "myproject.settings". If this isn\'t provided, the DJANGO_SETTINGS_MODULE environment variable will be used.')
-    options, args = parser.parse_args()
-    if options.settings:
-        os.environ['DJANGO_SETTINGS_MODULE'] = options.settings
-    elif "DJANGO_SETTINGS_MODULE" not in os.environ:
-        parser.error("DJANGO_SETTINGS_MODULE is not set in the environment. "
-                      "Set it or use --settings.")
-    django_tests(int(options.verbosity), args)
--- a/thirdparty/google_appengine/lib/django/tests/templates/404.html	Mon Sep 07 20:26:39 2009 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,1 +0,0 @@
-Django Internal Tests: 404 Error
\ No newline at end of file
--- a/thirdparty/google_appengine/lib/django/tests/templates/500.html	Mon Sep 07 20:26:39 2009 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,1 +0,0 @@
-Django Internal Tests: 500 Error
\ No newline at end of file
--- a/thirdparty/google_appengine/lib/django/tests/templates/login.html	Mon Sep 07 20:26:39 2009 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,19 +0,0 @@
-<html>
-<head></head>
-<body>
-<h1>Django Internal Tests: Login</h1>
-{% if form.has_errors %}
-<p>Your username and password didn't match. Please try again.</p>
-{% endif %}
-
-<form method="post" action=".">
-<table>
-<tr><td><label for="id_username">Username:</label></td><td>{{ form.username }}</td></tr>
-<tr><td><label for="id_password">Password:</label></td><td>{{ form.password }}</td></tr>
-</table>
-
-<input type="submit" value="login" />
-<input type="hidden" name="next" value="{{ next }}" />
-</form>
-</body>
-</html>
\ No newline at end of file
--- a/thirdparty/google_appengine/lib/django/tests/urls.py	Mon Sep 07 20:26:39 2009 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,13 +0,0 @@
-from django.conf.urls.defaults import *
-
-urlpatterns = patterns('',
-    # test_client modeltest urls
-    (r'^test_client/', include('modeltests.test_client.urls')),
-
-    # Always provide the auth system login and logout views
-    (r'^accounts/login/$', 'django.contrib.auth.views.login', {'template_name': 'login.html'}),
-    (r'^accounts/logout/$', 'django.contrib.auth.views.logout'),
-
-    # test urlconf for {% url %} template tag
-    (r'^url_tag/', include('regressiontests.templates.urls')),
-)
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/thirdparty/google_appengine/remote_api_shell.py	Mon Sep 07 20:27:37 2009 +0200
@@ -0,0 +1,60 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+"""Convenience wrapper for starting an appengine tool."""
+
+
+import os
+import sys
+
+if not hasattr(sys, 'version_info'):
+  sys.stderr.write('Very old versions of Python are not supported. Please '
+                   'use version 2.5 or greater.\n')
+  sys.exit(1)
+version_tuple = tuple(sys.version_info[:2])
+if version_tuple < (2, 4):
+  sys.stderr.write('Error: Python %d.%d is not supported. Please use '
+                   'version 2.5 or greater.\n' % version_tuple)
+  sys.exit(1)
+if version_tuple == (2, 4):
+  sys.stderr.write('Warning: Python 2.4 is not supported; this program may '
+                   'break. Please use version 2.5 or greater.\n')
+
+DIR_PATH = os.path.abspath(os.path.dirname(os.path.realpath(__file__)))
+SCRIPT_DIR = os.path.join(DIR_PATH, 'google', 'appengine', 'tools')
+
+EXTRA_PATHS = [
+  DIR_PATH,
+  os.path.join(DIR_PATH, 'lib', 'antlr3'),
+  os.path.join(DIR_PATH, 'lib', 'django'),
+  os.path.join(DIR_PATH, 'lib', 'webob'),
+  os.path.join(DIR_PATH, 'lib', 'yaml', 'lib'),
+]
+
+SCRIPT_EXCEPTIONS = {
+  "dev_appserver.py" : "dev_appserver_main.py"
+}
+
+def run_file(file_path, globals_, script_dir=SCRIPT_DIR):
+  """Execute the file at the specified path with the passed-in globals."""
+  sys.path = EXTRA_PATHS + sys.path
+  script_name = os.path.basename(file_path)
+  script_name = SCRIPT_EXCEPTIONS.get(script_name, script_name)
+  script_path = os.path.join(script_dir, script_name)
+  execfile(script_path, globals_)
+
+if __name__ == '__main__':
+  run_file(__file__, globals())
Binary file thirdparty/shrinksafe/js.jar has changed
Binary file thirdparty/shrinksafe/shrinksafe.jar has changed