changeset:   297:35211afcd563
parent:      296:b02dd2a5f329
child:       298:c76a366c7ab4
user:        Pawel Solyga <Pawel.Solyga@gmail.com>
date:        Fri, 10 Oct 2008 13:14:24 +0000
description: Load /Users/solydzajs/Downloads/google_appengine/ into trunk/thirdparty/google_appengine.
files:
thirdparty/google_appengine/RELEASE_NOTES
thirdparty/google_appengine/VERSION
thirdparty/google_appengine/google/appengine/api/appinfo.py
thirdparty/google_appengine/google/appengine/api/datastore.py
thirdparty/google_appengine/google/appengine/api/datastore_file_stub.py
thirdparty/google_appengine/google/appengine/api/datastore_types.py
thirdparty/google_appengine/google/appengine/api/mail.py
thirdparty/google_appengine/google/appengine/api/memcache/__init__.py
thirdparty/google_appengine/google/appengine/api/urlfetch.py
thirdparty/google_appengine/google/appengine/api/urlfetch_service_pb.py
thirdparty/google_appengine/google/appengine/ext/admin/__init__.py
thirdparty/google_appengine/google/appengine/ext/bulkload/__init__.py
thirdparty/google_appengine/google/appengine/ext/db/__init__.py
thirdparty/google_appengine/google/appengine/ext/search/__init__.py
thirdparty/google_appengine/google/appengine/ext/webapp/__init__.py
thirdparty/google_appengine/google/appengine/tools/dev_appserver.py
--- a/thirdparty/google_appengine/RELEASE_NOTES	Fri Oct 10 06:56:56 2008 +0000
+++ b/thirdparty/google_appengine/RELEASE_NOTES	Fri Oct 10 13:14:24 2008 +0000
@@ -3,6 +3,25 @@
 
 App Engine SDK - Release Notes
 
+Version 1.1.5 - September 29, 2008
+==================================
+
+  - Additional fixes for file paths on Windows and OSX.
+  - Sped up the datastore stub.
+  - Allow different types in list properties in datastore.Entity and Expando.
+  - Add add_multi and replace_multi to memcache API.
+      http://code.google.com/appengine/docs/memcache/clientclass.html#Client_add_multi
+      http://code.google.com/appengine/docs/memcache/clientclass.html#Client_replace_multi
+  - Ignore errors from the API proxy when calling memcache read methods.
+  - Set the webapp Request charset property more accurately from CONTENT_TYPE.
+  - Fixed an issue in the development console with schema caching.
+  - Fixed an issue with StringListProperty not returning a class.
+      http://code.google.com/p/googleappengine/issues/detail?id=415
+  - Fixed an issue in the development console where quotes couldn't be used
+      within fields.
+  - Fixed an issue with TimeProperty("0:0") (midnight).
+      http://code.google.com/p/googleappengine/issues/detail?id=279
+
 Version 1.1.4 - September 26, 2008
 ==================================
 
--- a/thirdparty/google_appengine/VERSION	Fri Oct 10 06:56:56 2008 +0000
+++ b/thirdparty/google_appengine/VERSION	Fri Oct 10 13:14:24 2008 +0000
@@ -1,3 +1,3 @@
-release: "1.1.4"
-timestamp: 1222457616
+release: "1.1.5"
+timestamp: 1222740096
 api_versions: ['1']
--- a/thirdparty/google_appengine/google/appengine/api/appinfo.py	Fri Oct 10 06:56:56 2008 +0000
+++ b/thirdparty/google_appengine/google/appengine/api/appinfo.py	Fri Oct 10 13:14:24 2008 +0000
@@ -62,6 +62,10 @@
 LOGIN_REQUIRED = 'required'
 LOGIN_ADMIN = 'admin'
 
+SECURE_HTTP = 'never'
+SECURE_HTTPS = 'always'
+SECURE_HTTP_OR_HTTPS = 'optional'
+
 RUNTIME_PYTHON = 'python'
 
 DEFAULT_SKIP_FILES = (r"^(.*/)?("
@@ -77,6 +81,7 @@
                       r")$")
 
 LOGIN = 'login'
+SECURE = 'secure'
 URL = 'url'
 STATIC_FILES = 'static_files'
 UPLOAD = 'upload'
@@ -117,6 +122,8 @@
   Attributes:
     login: Whether or not login is required to access URL.  Defaults to
       'optional'.
+    secure: Restriction on the protocol which can be used to serve
+            this URL/handler (HTTP, HTTPS or either).
     url: Regular expression used to fully match against the request URLs path.
       See Special Cases for using static_dir.
     static_files: Handler id attribute that maps URL to the appropriate
@@ -171,6 +178,11 @@
                               LOGIN_ADMIN,
                               default=LOGIN_OPTIONAL),
 
+    SECURE: validation.Options(SECURE_HTTP,
+                               SECURE_HTTPS,
+                               SECURE_HTTP_OR_HTTPS,
+                               default=SECURE_HTTP),
+
 
 
     HANDLER_STATIC_FILES: validation.Optional(_FILES_REGEX),
@@ -187,7 +199,7 @@
     HANDLER_SCRIPT: validation.Optional(_FILES_REGEX),
   }
 
-  COMMON_FIELDS = set([URL, LOGIN])
+  COMMON_FIELDS = set([URL, LOGIN, SECURE])
 
   ALLOWED_FIELDS = {
     HANDLER_STATIC_FILES: (MIME_TYPE, UPLOAD, EXPIRATION),
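
For illustration, a minimal sketch of the new option at the API level, assuming the URLMap constructor accepts its validated fields as keyword arguments (the handler values here are hypothetical):

    from google.appengine.api import appinfo

    # a handler that may only be served over HTTPS ('always')
    handler = appinfo.URLMap(url='/admin/.*',
                             script='admin.py',
                             secure=appinfo.SECURE_HTTPS)
    assert handler.secure == 'always'
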
--- a/thirdparty/google_appengine/google/appengine/api/datastore.py	Fri Oct 10 06:56:56 2008 +0000
+++ b/thirdparty/google_appengine/google/appengine/api/datastore.py	Fri Oct 10 13:14:24 2008 +0000
@@ -1433,7 +1433,7 @@
           if (err.application_error ==
               datastore_pb.Error.CONCURRENT_TRANSACTION):
             logging.warning('Transaction collision for entity group with '
-                            'key %r', tx.entity_group)
+                            'key %r. Retrying...', tx.entity_group)
             tx.handle = None
             tx.entity_group = None
             continue
--- a/thirdparty/google_appengine/google/appengine/api/datastore_file_stub.py	Fri Oct 10 06:56:56 2008 +0000
+++ b/thirdparty/google_appengine/google/appengine/api/datastore_file_stub.py	Fri Oct 10 13:14:24 2008 +0000
@@ -64,6 +64,25 @@
 datastore_pb.Query.__hash__ = lambda self: hash(self.Encode())
 
 
+class _StoredEntity(object):
+  """Simple wrapper around an entity stored by the stub.
+
+  Public properties:
+    native: Native protobuf Python object, entity_pb.EntityProto.
+    encoded: Encoded binary representation of above protobuf.
+  """
+
+  def __init__(self, entity):
+    """Create a _StoredEntity object and store an entity.
+
+    Args:
+      entity: entity_pb.EntityProto to store.
+    """
+    self.native = entity
+
+    self.encoded = entity.Encode()
+
+
 class DatastoreFileStub(object):
   """ Persistent stub for the Python datastore API.
 
@@ -72,6 +91,29 @@
   and is backed by files on disk.
   """
 
+  _PROPERTY_TYPE_TAGS = {
+    datastore_types.Blob: entity_pb.PropertyValue.kstringValue,
+    bool: entity_pb.PropertyValue.kbooleanValue,
+    datastore_types.Category: entity_pb.PropertyValue.kstringValue,
+    datetime.datetime: entity_pb.PropertyValue.kint64Value,
+    datastore_types.Email: entity_pb.PropertyValue.kstringValue,
+    float: entity_pb.PropertyValue.kdoubleValue,
+    datastore_types.GeoPt: entity_pb.PropertyValue.kPointValueGroup,
+    datastore_types.IM: entity_pb.PropertyValue.kstringValue,
+    int: entity_pb.PropertyValue.kint64Value,
+    datastore_types.Key: entity_pb.PropertyValue.kReferenceValueGroup,
+    datastore_types.Link: entity_pb.PropertyValue.kstringValue,
+    long: entity_pb.PropertyValue.kint64Value,
+    datastore_types.PhoneNumber: entity_pb.PropertyValue.kstringValue,
+    datastore_types.PostalAddress: entity_pb.PropertyValue.kstringValue,
+    datastore_types.Rating: entity_pb.PropertyValue.kint64Value,
+    str: entity_pb.PropertyValue.kstringValue,
+    datastore_types.Text: entity_pb.PropertyValue.kstringValue,
+    type(None): 0,
+    unicode: entity_pb.PropertyValue.kstringValue,
+    users.User: entity_pb.PropertyValue.kUserValueGroup,
+    }
+
   def __init__(self, app_id, datastore_file, history_file,
                require_indexes=False):
     """Constructor.
@@ -159,7 +201,7 @@
         last_path = entity.key().path().element_list()[-1]
         app_kind = (entity.key().app(), last_path.type())
         kind_dict = self.__entities.setdefault(app_kind, {})
-        kind_dict[entity.key()] = entity
+        kind_dict[entity.key()] = _StoredEntity(entity)
 
         if last_path.has_id() and last_path.id() >= self.__next_id:
           self.__next_id = last_path.id() + 1
@@ -192,7 +234,7 @@
       encoded = []
       for kind_dict in self.__entities.values():
         for entity in kind_dict.values():
-          encoded.append(entity.Encode())
+          encoded.append(entity.encoded)
 
       self.__WritePickled(encoded, self.__datastore_file)
 
@@ -303,7 +345,7 @@
       for clone in clones:
         last_path = clone.key().path().element_list()[-1]
         kind_dict = self.__entities.setdefault((app, last_path.type()), {})
-        kind_dict[clone.key()] = clone
+        kind_dict[clone.key()] = _StoredEntity(clone)
     finally:
       self.__entities_lock.release()
 
@@ -320,7 +362,7 @@
 
         group = get_response.add_entity()
         try:
-          entity = self.__entities[app, last_path.type()][key]
+          entity = self.__entities[app, last_path.type()][key].native
         except KeyError:
           entity = None
 
@@ -390,7 +432,7 @@
     try:
       query.set_app(app)
       results = self.__entities[app, query.kind()].values()
-      results = [datastore.Entity._FromPb(pb) for pb in results]
+      results = [datastore.Entity._FromPb(entity.native) for entity in results]
     except KeyError:
       results = []
 
@@ -432,7 +474,15 @@
           for filter_prop in filt.property_list():
             filter_val = datastore_types.FromPropertyPb(filter_prop)
 
-            comp = u'%r %s %r' % (fixed_entity_val, op, filter_val)
+            fixed_entity_type = self._PROPERTY_TYPE_TAGS.get(
+              fixed_entity_val.__class__)
+            filter_type = self._PROPERTY_TYPE_TAGS.get(filter_val.__class__)
+            if fixed_entity_type == filter_type:
+              comp = u'%r %s %r' % (fixed_entity_val, op, filter_val)
+            elif op != '==':
+              comp = '%r %s %r' % (fixed_entity_type, op, filter_type)
+            else:
+              continue
 
             logging.log(logging.DEBUG - 1,
                         'Evaling filter expression "%s"', comp)
@@ -463,7 +513,7 @@
       prop = order.property().decode('utf-8')
       results = [entity for entity in results if has_prop_indexed(entity, prop)]
 
-    def order_compare(a, b):
+    def order_compare_entities(a, b):
       """ Return a negative, zero or positive number depending on whether
       entity a is considered smaller than, equal to, or larger than b,
       according to the query's orderings. """
@@ -471,36 +521,51 @@
       for o in query.order_list():
         prop = o.property().decode('utf-8')
 
-        if o.direction() is datastore_pb.Query_Order.ASCENDING:
-          selector = min
-        else:
-          selector = max
+        reverse = (o.direction() is datastore_pb.Query_Order.DESCENDING)
 
         a_val = a[prop]
         if isinstance(a_val, list):
-          a_val = selector(a_val)
+          a_val = sorted(a_val, order_compare_properties, reverse=reverse)[0]
 
         b_val = b[prop]
         if isinstance(b_val, list):
-          b_val = selector(b_val)
+          b_val = sorted(b_val, order_compare_properties, reverse=reverse)[0]
 
-        try:
-          cmped = cmp(a_val, b_val)
-        except TypeError:
-          cmped = NotImplementedError
-
-        if cmped == NotImplementedError:
-          cmped = cmp(type(a_val), type(b_val))
+        cmped = order_compare_properties(a_val, b_val)
 
         if o.direction() is datastore_pb.Query_Order.DESCENDING:
           cmped = -cmped
 
         if cmped != 0:
           return cmped
+
       if cmped == 0:
         return cmp(a.key(), b.key())
 
-    results.sort(order_compare)
+    def order_compare_properties(x, y):
+      """Return a negative, zero or positive number depending on whether
+      property value x is considered smaller than, equal to, or larger than
+      property value y. If x and y are different types, they're compared based
+      on the type ordering used in the real datastore, which is based on the
+      tag numbers in the PropertyValue PB.
+      """
+      if isinstance(x, datetime.datetime):
+        x = datastore_types.DatetimeToTimestamp(x)
+      if isinstance(y, datetime.datetime):
+        y = datastore_types.DatetimeToTimestamp(y)
+
+      x_type = self._PROPERTY_TYPE_TAGS.get(x.__class__)
+      y_type = self._PROPERTY_TYPE_TAGS.get(y.__class__)
+
+      if x_type == y_type:
+        try:
+          return cmp(x, y)
+        except TypeError:
+          return 0
+      else:
+        return cmp(x_type, y_type)
+
+    results.sort(order_compare_entities)
 
     offset = 0
     limit = len(results)
@@ -614,7 +679,7 @@
         props = {}
 
         for entity in self.__entities[(app, kind)].values():
-          for prop in entity.property_list():
+          for prop in entity.native.property_list():
             if prop.name() not in props:
               props[prop.name()] = entity_pb.PropertyValue()
             props[prop.name()].MergeFrom(prop.value())
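
The tag-based comparison above mirrors the production datastore's cross-type sort order (integers before strings, strings before doubles, and so on). A toy Python 2 sketch of the rule, with abbreviated tag numbers; the real tags come from entity_pb.PropertyValue:

    TAG = {int: 1, long: 1, unicode: 3, str: 3, float: 4}

    def compare_values(x, y):
        tx, ty = TAG[type(x)], TAG[type(y)]
        if tx == ty:
            return cmp(x, y)   # same type: compare the values
        return cmp(tx, ty)     # different types: compare the tags

    print sorted([u'b', 3, u'a', 1.5], cmp=compare_values)
    # -> [3, u'a', u'b', 1.5]: ints sort before strings, floats after
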
--- a/thirdparty/google_appengine/google/appengine/api/datastore_types.py	Fri Oct 10 06:56:56 2008 +0000
+++ b/thirdparty/google_appengine/google/appengine/api/datastore_types.py	Fri Oct 10 13:14:24 2008 +0000
@@ -1018,10 +1018,9 @@
 
   Raises:
     BadPropertyError if the property name is invalid. BadValueError if the
-    property did not validate correctly, a list property did not have values
-    all of the same type, or the value was an empty list. Other exception types
-    (like OverflowError) if the property value does not meet type-specific
-    criteria.
+    property did not validate correctly or the value was an empty list. Other
+    exception types (like OverflowError) if the property value does not meet
+    type-specific criteria.
   """
   ValidateString(name, 'property name', datastore_errors.BadPropertyError)
   if RESERVED_PROPERTY_NAME.match(name):
@@ -1047,21 +1046,13 @@
         (name, repr(values)))
 
   try:
-    proptype = values[0].__class__
-    prop_validator = _VALIDATE_PROPERTY_VALUES.get(proptype)
-    if prop_validator is None:
-      raise datastore_errors.BadValueError(
-        'Unsupported type for property %s: %s' % (name, proptype))
+    for v in values:
+      prop_validator = _VALIDATE_PROPERTY_VALUES.get(v.__class__)
+      if prop_validator is None:
+        raise datastore_errors.BadValueError(
+          'Unsupported type for property %s: %s' % (name, v.__class__))
+      prop_validator(name, v)
 
-    for v in values:
-      if v is not None:
-        if (v.__class__ is not proptype and not
-            (v.__class__ in _STRING_TYPES and proptype in _STRING_TYPES)):
-          raise datastore_errors.BadValueError(
-              'Values for property %s have mismatched types: %s (a %s) and '
-              '%s (a %s).' % (name, values[0], proptype, v, typename(v)))
-
-        prop_validator(name, v)
   except (KeyError, ValueError, TypeError, IndexError, AttributeError), msg:
     raise datastore_errors.BadValueError(
       'Error type checking values for property %s: %s' % (name, msg))
@@ -1100,10 +1091,19 @@
     value: A datetime.datetime instance.
     pbvalue: The entity_pb.PropertyValue to pack this value into.
   """
+  pbvalue.set_int64value(DatetimeToTimestamp(value))
+
+
+def DatetimeToTimestamp(value):
+  """Converts a datetime.datetime to seconds since the epoch, as a float.
+  Args:
+    value: datetime.datetime
+
+  Returns: value as a long
+  """
   if value.tzinfo:
     value = value.astimezone(UTC)
-  pbvalue.set_int64value(
-    long(calendar.timegm(value.timetuple()) * 1000000L) + value.microsecond)
+  return long(calendar.timegm(value.timetuple()) * 1000000L) + value.microsecond
 
 
 def PackGeoPt(name, value, pbvalue):
@@ -1226,21 +1226,21 @@
   values_type = type(values)
   if values_type is list:
     multiple = True
-    proptype = type(values[0])
   else:
     multiple = False
-    proptype = type(values)
     values = [values]
 
-  pack_prop = _PACK_PROPERTY_VALUES[proptype]
   pbs = []
   for v in values:
     pb = entity_pb.Property()
     pb.set_name(encoded_name)
     pb.set_multiple(multiple)
-    meaning = _PROPERTY_MEANINGS.get(proptype)
+
+    meaning = _PROPERTY_MEANINGS.get(v.__class__)
     if meaning is not None:
       pb.set_meaning(meaning)
+
+    pack_prop = _PACK_PROPERTY_VALUES[v.__class__]
     pbvalue = pack_prop(name, v, pb.mutable_value())
     pbs.append(pb)
 
@@ -1328,11 +1328,7 @@
     auth_domain = unicode(pbval.uservalue().auth_domain().decode('utf-8'))
     value = users.User(email=email, _auth_domain=auth_domain)
   else:
-    if pb.multiple():
-      raise datastore_errors.BadValueError(
-          'Record indicated as multiple, but has no values.')
-    else:
-      value = None
+    value = None
 
   try:
     if pb.has_meaning():
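
This per-value validation is what enables the release-note item about mixed-type list properties; a minimal sketch using the low-level API (kind and property names hypothetical):

    from google.appengine.api import datastore

    entity = datastore.Entity('Note')
    entity['tags'] = [u'draft', 42, 3.14]  # values no longer need one shared type
    datastore.Put(entity)
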
--- a/thirdparty/google_appengine/google/appengine/api/mail.py	Fri Oct 10 06:56:56 2008 +0000
+++ b/thirdparty/google_appengine/google/appengine/api/mail.py	Fri Oct 10 13:14:24 2008 +0000
@@ -474,7 +474,7 @@
       make_sync_call('mail', self._API_CALL, message, response)
     except apiproxy_errors.ApplicationError, e:
       if e.application_error in ERROR_MAP:
-        raise ERROR_MAP[e.application_error]()
+        raise ERROR_MAP[e.application_error](e.error_detail)
       raise e
 
   def Send(self, *args, **kwds):
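
With error_detail forwarded into the mapped exception, callers can surface the service's message; a sketch, assuming the usual mail error classes are re-exported by this module:

    import logging
    from google.appengine.api import mail

    try:
        mail.send_mail(sender='app@example.com', to='user@example.com',
                       subject='Hello', body='Hi there')
    except mail.InvalidSenderError, e:
        logging.error('Mail rejected: %s', e)  # str(e) now carries error_detail
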
--- a/thirdparty/google_appengine/google/appengine/api/memcache/__init__.py	Fri Oct 10 06:56:56 2008 +0000
+++ b/thirdparty/google_appengine/google/appengine/api/memcache/__init__.py	Fri Oct 10 13:14:24 2008 +0000
@@ -341,7 +341,7 @@
     response = MemcacheStatsResponse()
     try:
       self._make_sync_call('memcache', 'Stats', request, response)
-    except apiproxy_errors.ApplicationError, e:
+    except apiproxy_errors.Error:
       return None
 
     if not response.has_stats():
@@ -367,7 +367,7 @@
     response = MemcacheFlushResponse()
     try:
       self._make_sync_call('memcache', 'FlushAll', request, response)
-    except apiproxy_errors.ApplicationError:
+    except apiproxy_errors.Error:
       return False
     return True
 
@@ -391,7 +391,7 @@
     response = MemcacheGetResponse()
     try:
       self._make_sync_call('memcache', 'Get', request, response)
-    except apiproxy_errors.ApplicationError:
+    except apiproxy_errors.Error:
       return None
 
     if not response.item_size():
@@ -427,7 +427,7 @@
       request.add_key(_key_string(key, key_prefix, user_key))
     try:
       self._make_sync_call('memcache', 'Get', request, response)
-    except apiproxy_errors.ApplicationError:
+    except apiproxy_errors.Error:
       return {}
 
     return_value = {}
@@ -470,7 +470,7 @@
     delete_item.set_delete_time(int(math.ceil(seconds)))
     try:
       self._make_sync_call('memcache', 'Delete', request, response)
-    except apiproxy_errors.ApplicationError:
+    except apiproxy_errors.Error:
       return DELETE_NETWORK_FAILURE
     assert response.delete_status_size() == 1, 'Unexpected status size.'
 
@@ -512,7 +512,7 @@
       delete_item.set_delete_time(int(math.ceil(seconds)))
     try:
       self._make_sync_call('memcache', 'Delete', request, response)
-    except apiproxy_errors.ApplicationError:
+    except apiproxy_errors.Error:
       return False
     return True
 
@@ -607,18 +607,20 @@
     response = MemcacheSetResponse()
     try:
       self._make_sync_call('memcache', 'Set', request, response)
-    except apiproxy_errors.ApplicationError:
+    except apiproxy_errors.Error:
       return False
     if response.set_status_size() != 1:
       return False
     return response.set_status(0) == MemcacheSetResponse.STORED
 
-  def set_multi(self, mapping, time=0, key_prefix='', min_compress_len=0):
-    """Set multiple keys' values at once.
+  def _set_multi_with_policy(self, policy, mapping, time=0, key_prefix=''):
+    """Set multiple keys with a specified policy.
 
-    This reduces the network latency of doing many requests in serial.
+    Helper function for set_multi(), add_multi(), and replace_multi(). This
+    reduces the network latency of doing many requests in serial.
 
     Args:
+      policy:  One of MemcacheSetRequest.SET, ADD, or REPLACE.
       mapping: Dictionary of keys to values.
       time: Optional expiration time, either relative number of seconds
         from current time (up to 1 month), or an absolute Unix epoch time.
@@ -626,11 +628,12 @@
         memory pressure.  Float values will be rounded up to the nearest
         whole second.
       key_prefix: Prefix to prepend to all keys.
-      min_compress_len: Unimplemented compatibility option.
 
     Returns:
       A list of keys whose values were NOT set.  On total success,
-      this list should be empty.
+      this list should be empty.  On network/RPC/server errors,
+      a list of all input keys is returned; in this case the keys
+      may or may not have been updated.
     """
     if not isinstance(time, (int, long, float)):
       raise TypeError('Expiration must be a number.')
@@ -649,14 +652,14 @@
       item.set_key(server_key)
       item.set_value(stored_value)
       item.set_flags(flags)
-      item.set_set_policy(MemcacheSetRequest.SET)
+      item.set_set_policy(policy)
       item.set_expiration_time(int(math.ceil(time)))
 
     response = MemcacheSetResponse()
     try:
       self._make_sync_call('memcache', 'Set', request, response)
-    except apiproxy_errors.ApplicationError:
-      return False
+    except apiproxy_errors.Error:
+      return user_key.values()
 
     assert response.set_status_size() == len(server_keys)
 
@@ -667,6 +670,66 @@
 
     return unset_list
 
+  def set_multi(self, mapping, time=0, key_prefix='', min_compress_len=0):
+    """Set multiple keys' values at once, regardless of previous contents.
+
+    Args:
+      mapping: Dictionary of keys to values.
+      time: Optional expiration time, either relative number of seconds
+        from current time (up to 1 month), or an absolute Unix epoch time.
+        By default, items never expire, though items may be evicted due to
+        memory pressure.  Float values will be rounded up to the nearest
+        whole second.
+      key_prefix: Prefix to prepend to all keys.
+      min_compress_len: Unimplemented compatibility option.
+
+    Returns:
+      A list of keys whose values were NOT set.  On total success,
+      this list should be empty.
+    """
+    return self._set_multi_with_policy(MemcacheSetRequest.SET, mapping,
+                                       time=time, key_prefix=key_prefix)
+
+  def add_multi(self, mapping, time=0, key_prefix='', min_compress_len=0):
+    """Set multiple keys' values iff items are not already in memcache.
+
+    Args:
+      mapping: Dictionary of keys to values.
+      time: Optional expiration time, either relative number of seconds
+        from current time (up to 1 month), or an absolute Unix epoch time.
+        By default, items never expire, though items may be evicted due to
+        memory pressure.  Float values will be rounded up to the nearest
+        whole second.
+      key_prefix: Prefix to prepend to all keys.
+      min_compress_len: Unimplemented compatibility option.
+
+    Returns:
+      A list of keys whose values were NOT set because they already existed
+      in memcache.  On total success, this list should be empty.
+    """
+    return self._set_multi_with_policy(MemcacheSetRequest.ADD, mapping,
+                                       time=time, key_prefix=key_prefix)
+
+  def replace_multi(self, mapping, time=0, key_prefix='', min_compress_len=0):
+    """Replace multiple keys' values, failing if the items aren't in memcache.
+
+    Args:
+      mapping: Dictionary of keys to values.
+      time: Optional expiration time, either relative number of seconds
+        from current time (up to 1 month), or an absolute Unix epoch time.
+        By default, items never expire, though items may be evicted due to
+        memory pressure.  Float values will be rounded up to the nearest
+        whole second.
+      key_prefix: Prefix to prepend to all keys.
+      min_compress_len: Unimplemented compatibility option.
+
+    Returns:
+      A list of keys whose values were NOT set because they were not already
+      present in memcache.  On total success, this list should be empty.
+    """
+    return self._set_multi_with_policy(MemcacheSetRequest.REPLACE, mapping,
+                                       time=time, key_prefix=key_prefix)
+
   def incr(self, key, delta=1):
     """Atomically increments a key's value.
 
@@ -685,8 +748,9 @@
         defaulting to 1.
 
     Returns:
-      New long integer value, or None if key was not in the cache or could not
-      be incremented for any other reason.
+      New long integer value, or None if key was not in the cache, could not
+      be incremented for any other reason, or a network/RPC/server error
+      occurred.
 
     Raises:
       ValueError: If number is negative.
@@ -710,7 +774,7 @@
 
     Returns:
       New long integer value, or None if key wasn't in cache and couldn't
-      be decremented.
+      be decremented, or a network/RPC/server error occurred.
 
     Raises:
       ValueError: If number is negative.
@@ -728,7 +792,8 @@
         or decrement by.
 
     Returns:
-      New long integer value, or None on cache miss.
+      New long integer value, or None on cache miss or network/RPC/server
+      error.
 
     Raises:
       ValueError: If delta is negative.
@@ -750,7 +815,7 @@
 
     try:
       self._make_sync_call('memcache', 'Increment', request, response)
-    except apiproxy_errors.ApplicationError:
+    except apiproxy_errors.Error:
       return None
 
     if response.has_new_value():
@@ -783,7 +848,9 @@
   var_dict['set'] = _CLIENT.set
   var_dict['set_multi'] = _CLIENT.set_multi
   var_dict['add'] = _CLIENT.add
+  var_dict['add_multi'] = _CLIENT.add_multi
   var_dict['replace'] = _CLIENT.replace
+  var_dict['replace_multi'] = _CLIENT.replace_multi
   var_dict['delete'] = _CLIENT.delete
   var_dict['delete_multi'] = _CLIENT.delete_multi
   var_dict['incr'] = _CLIENT.incr
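
A short usage sketch of the batch operations registered above (keys, values, and prefix hypothetical):

    from google.appengine.api import memcache

    # add_multi stores only keys that are absent; returns the keys NOT set
    not_added = memcache.add_multi({'a': 1, 'b': 2}, time=60, key_prefix='demo:')

    # replace_multi updates only keys that already exist
    not_replaced = memcache.replace_multi({'a': 3}, key_prefix='demo:')
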
--- a/thirdparty/google_appengine/google/appengine/api/urlfetch.py	Fri Oct 10 06:56:56 2008 +0000
+++ b/thirdparty/google_appengine/google/appengine/api/urlfetch.py	Fri Oct 10 13:14:24 2008 +0000
@@ -175,9 +175,13 @@
 
   scheme, host_port, path, query, fragment = urlparse.urlsplit(url)
 
-  if (host_port == os.environ['HTTP_HOST'] and
-      urllib2.unquote(path) == urllib2.unquote(os.environ['PATH_INFO'])):
-    return True
+  if host_port == os.environ['HTTP_HOST']:
+    current_path = urllib2.unquote(os.environ['PATH_INFO'])
+    desired_path = urllib2.unquote(path)
+
+    if (current_path == desired_path or
+        (current_path in ('', '/') and desired_path in ('', '/'))):
+      return True
 
   return False
 
@@ -264,6 +268,9 @@
     if (e.application_error ==
         urlfetch_service_pb.URLFetchServiceError.RESPONSE_TOO_LARGE):
       raise ResponseTooLargeError(None)
+    if (e.application_error ==
+        urlfetch_service_pb.URLFetchServiceError.DEADLINE_EXCEEDED):
+      raise DownloadError(str(e))
     raise e
   result = _URLFetchResult(response)
 
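A sketch of what the new error path means for callers (URL hypothetical):

    from google.appengine.api import urlfetch

    try:
        result = urlfetch.fetch('http://example.com/slow-endpoint')
    except urlfetch.DownloadError, e:
        # as of this change, also raised when the fetch deadline is exceeded
        result = None
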
--- a/thirdparty/google_appengine/google/appengine/api/urlfetch_service_pb.py	Fri Oct 10 06:56:56 2008 +0000
+++ b/thirdparty/google_appengine/google/appengine/api/urlfetch_service_pb.py	Fri Oct 10 13:14:24 2008 +0000
@@ -30,6 +30,7 @@
   FETCH_ERROR  =    2
   UNSPECIFIED_ERROR =    3
   RESPONSE_TOO_LARGE =    4
+  DEADLINE_EXCEEDED =    5
 
   _ErrorCode_NAMES = {
     0: "OK",
@@ -37,6 +38,7 @@
     2: "FETCH_ERROR",
     3: "UNSPECIFIED_ERROR",
     4: "RESPONSE_TOO_LARGE",
+    5: "DEADLINE_EXCEEDED",
   }
 
   def ErrorCode_Name(cls, x): return cls._ErrorCode_NAMES.get(x, "")
--- a/thirdparty/google_appengine/google/appengine/ext/admin/__init__.py	Fri Oct 10 06:56:56 2008 +0000
+++ b/thirdparty/google_appengine/google/appengine/ext/admin/__init__.py	Fri Oct 10 13:14:24 2008 +0000
@@ -755,7 +755,7 @@
     else:
       string_value = ''
     return '<input class="%s" name="%s" type="text" size="%d" value="%s"/>' % (cgi.escape(self.name()), cgi.escape(name), self.input_field_size(),
-            cgi.escape(string_value))
+            cgi.escape(string_value, True))
 
   def input_field_size(self):
     return 30
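
The second argument to cgi.escape turns on attribute-safe quoting, which is what keeps quotes inside the generated value="..." attribute from breaking the markup:

    >>> import cgi
    >>> cgi.escape('say "hi"')        # without quoting, '"' passes through
    'say "hi"'
    >>> cgi.escape('say "hi"', True)  # with quoting, safe inside value="..."
    'say &quot;hi&quot;'
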
--- a/thirdparty/google_appengine/google/appengine/ext/bulkload/__init__.py	Fri Oct 10 06:56:56 2008 +0000
+++ b/thirdparty/google_appengine/google/appengine/ext/bulkload/__init__.py	Fri Oct 10 13:14:24 2008 +0000
@@ -28,6 +28,7 @@
   'Person',
   [('name', str),
    ('email', datastore_types.Email),
+   ('cool', bool), # ('0', 'False', 'No', '')=False, otherwise bool(value)
    ('birthdate', lambda x: datetime.datetime.fromtimestamp(float(x))),
   ])
 
@@ -108,7 +109,7 @@
 import traceback
 import types
 import struct
-
+import zlib
 
 import google
 import wsgiref.handlers
@@ -227,6 +228,8 @@
 
     entity = datastore.Entity(self.__kind, name=key_name)
     for (name, converter), val in zip(self.__properties, values):
+      if converter is bool and val.lower() in ('0', 'false', 'no'):
+        val = False
       entity[name] = converter(val)
 
     entities = self.HandleEntity(entity)
@@ -341,9 +344,51 @@
     page += '</body></html>'
     return page
 
+  def IterRows(self, reader):
+    """ Yields a tuple of a line number and row for each row of the CSV data.
+
+    Args:
+      reader: a csv reader for the input data.
+    """
+    line_num = 1
+    for columns in reader:
+      yield (line_num, columns)
+      line_num += 1
+
+  def LoadEntities(self, iter, loader, key_format=None):
+    """Generates entities and loads them into the datastore.  Returns
+    a tuple of HTTP code and string reply.
+
+    Args:
+      iter: an iterator yielding pairs of a line number and row contents.
+      key_format: a format string to convert a line number into an
+        entity id. If None, then entity ID's are automatically generated.
+      """
+    entities = []
+    output = []
+    for line_num, columns in iter:
+      key_name = None
+      if key_format is not None:
+        key_name = key_format % line_num
+      if columns:
+        try:
+          output.append('\nLoading from line %d...' % line_num)
+          new_entities = loader.CreateEntity(columns, key_name=key_name)
+          if new_entities:
+            entities.extend(new_entities)
+          output.append('done.')
+        except:
+          stacktrace = traceback.format_exc()
+          output.append('error:\n%s' % stacktrace)
+          return (httplib.BAD_REQUEST, ''.join(output))
+
+    for entity in entities:
+      datastore.Put(entity)
+
+    return (httplib.OK, ''.join(output))
 
   def Load(self, kind, data):
-    """ Parses CSV data, uses a Loader to convert to entities, and stores them.
+    """Parses CSV data, uses a Loader to convert to entities, and stores them.
 
     On error, fails fast. Returns a "bad request" HTTP response code and
     includes the traceback in the output.
@@ -375,28 +420,34 @@
     except AttributeError:
       pass
 
-    entities = []
+    return self.LoadEntities(self.IterRows(reader), loader)
+
+  def IterRowsV1(self, data):
+    """Yields a tuple of columns for each row in the uploaded data.
+
+    Args:
+      data: a string containing the unzipped v1 format data to load.
+
+    """
+    column_count, = struct.unpack_from('!i', data)
+    offset = 4
+
+    lengths_format = '!%di' % (column_count,)
 
-    line_num = 1
-    for columns in reader:
-      if columns:
-        try:
-          output.append('\nLoading from line %d...' % line_num)
-          new_entities = loader.CreateEntity(columns)
-          if new_entities:
-            entities.extend(new_entities)
-          output.append('done.')
-        except:
-          stacktrace = traceback.format_exc()
-          output.append('error:\n%s' % stacktrace)
-          return (httplib.BAD_REQUEST, ''.join(output))
+    while offset < len(data):
+      id_num = struct.unpack_from('!i', data, offset=offset)
+      offset += 4
+
+      value_lengths = struct.unpack_from(lengths_format, data, offset=offset)
+      offset += 4 * column_count
 
-      line_num += 1
+      columns = struct.unpack_from(''.join('%ds' % length
+                                           for length in value_lengths), data,
+                                   offset=offset)
+      offset += sum(value_lengths)
 
-    for entity in entities:
-      datastore.Put(entity)
+      yield (id_num, columns)
 
-    return (httplib.OK, ''.join(output))
 
   def LoadV1(self, kind, data):
     """Parses version-1 format data, converts to entities, and stores them.
@@ -421,46 +472,19 @@
       loader = Loader.RegisteredLoaders()[kind]
     except KeyError:
       output.append('Error: no Loader defined for kind %s.' % kind)
-      return httplib.BAD_REQUEST, ''.join(output)
-
-    entities = []
-
-    column_count, = struct.unpack_from('!i', data)
-
-    offset = 4
-
-    lengths_format = '!%di' % (column_count,)
-
-    while offset < len(data):
-      id_num = struct.unpack_from('!i', data, offset=offset)
-      offset += 4
-
-      key_name = 'i%010d' % id_num
-
-      value_lengths = struct.unpack_from(lengths_format, data, offset=offset)
-      offset += 4 * column_count
+      return (httplib.BAD_REQUEST, ''.join(output))
 
-      columns = struct.unpack_from(''.join('%ds' % length
-                                           for length in value_lengths), data,
-                                   offset=offset)
-      offset += sum(value_lengths)
+    try:
+      data = zlib.decompress(data)
+    except:
+      stacktrace = traceback.format_exc()
+      output.append('Error: Could not decompress data\n%s' % stacktrace)
+      return (httplib.BAD_REQUEST, ''.join(output))
 
-      try:
-        output.append('Loading key_name=%s... ' % key_name)
-        new_entities = loader.CreateEntity(columns, key_name=key_name)
-        if new_entities:
-          entities.extend(new_entities)
-        output.append('done.\n')
-      except:
-        stacktrace = traceback.format_exc()
-        output.append('error:\n%s' % stacktrace)
-        return httplib.BAD_REQUEST, ''.join(output)
-
-    for entity in entities:
-      datastore.Put(entity)
-
-    return httplib.OK, ''.join(output)
-
+    key_format = 'i%010d'
+    return self.LoadEntities(self.IterRowsV1(data),
+                             loader,
+                             key_format=key_format)
 
 def main(*loaders):
   """Starts bulk upload.
--- a/thirdparty/google_appengine/google/appengine/ext/db/__init__.py	Fri Oct 10 06:56:56 2008 +0000
+++ b/thirdparty/google_appengine/google/appengine/ext/db/__init__.py	Fri Oct 10 13:14:24 2008 +0000
@@ -1938,6 +1938,16 @@
     """
     return datetime.datetime.now().time()
 
+  def empty(self, value):
+    """Is time property empty.
+
+    "0:0" (midnight) is not an empty value.
+
+    Returns:
+      True if value is None, else False.
+    """
+    return value is None
+
   def get_value_for_datastore(self, model_instance):
     """Get value from property to send to datastore.
 
@@ -2201,19 +2211,24 @@
     return list(super(ListProperty, self).default_value())
 
 
-def StringListProperty(verbose_name=None, default=None, **kwds):
-  """A shorthand for the most common type of ListProperty.
-
-  Args:
-    verbose_name: Optional verbose name.
-    default: Optional default value; if omitted, an empty list is used.
-    **kwds: Optional additional keyword arguments, passed to ListProperty().
-
-  Returns:
-    A ListProperty instance whose item type is basestring and whose other
-    arguments are whatever was passed here.
+class StringListProperty(ListProperty):
+  """A property that stores a list of strings.
+
+  A shorthand for the most common type of ListProperty.
   """
-  return ListProperty(basestring, verbose_name, default, **kwds)
+
+  def __init__(self, verbose_name=None, default=None, **kwds):
+    """Construct StringListProperty.
+
+    Args:
+      verbose_name: Optional verbose name.
+      default: Optional default value; if omitted, an empty list is used.
+      **kwds: Optional additional keyword arguments, passed to ListProperty().
+    """
+    super(StringListProperty, self).__init__(basestring,
+                                             verbose_name=verbose_name,
+                                             default=default,
+                                             **kwds)
 
 
 class ReferenceProperty(Property):
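
Because StringListProperty is now a class rather than a factory function (issue 415), it works with isinstance checks and can be subclassed; a minimal sketch:

    from google.appengine.ext import db

    class Snippet(db.Model):
        tags = db.StringListProperty()

    # this used to fail when StringListProperty was a plain function
    assert isinstance(Snippet.tags, db.StringListProperty)
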
--- a/thirdparty/google_appengine/google/appengine/ext/search/__init__.py	Fri Oct 10 06:56:56 2008 +0000
+++ b/thirdparty/google_appengine/google/appengine/ext/search/__init__.py	Fri Oct 10 13:14:24 2008 +0000
@@ -122,9 +122,10 @@
    'where', 'whether', 'which', 'while', 'who', 'whose', 'why', 'widely',
    'will', 'with', 'within', 'without', 'would', 'yet', 'you'])
 
-  _PUNCTUATION_REGEX = re.compile('[' + re.escape(string.punctuation) + ']')
+  _word_delimiter_regex = re.compile('[' + re.escape(string.punctuation) + ']')
 
-  def __init__(self, kind_or_entity, *args, **kwargs):
+  def __init__(self, kind_or_entity, word_delimiter_regex=None, *args,
+               **kwargs):
     """Constructor. May be called as a copy constructor.
 
     If kind_or_entity is a datastore.Entity, copies it into this Entity.
@@ -137,7 +138,9 @@
 
     Args:
       kind_or_entity: string or datastore.Entity
+      word_delimiter_regex: a regex matching characters that delimit words
     """
+    self._word_delimiter_regex = word_delimiter_regex
     if isinstance(kind_or_entity, datastore.Entity):
       self._Entity__key = kind_or_entity._Entity__key
       self.update(kind_or_entity)
@@ -160,7 +163,8 @@
       if (isinstance(values[0], basestring) and
           not isinstance(values[0], datastore_types.Blob)):
         for value in values:
-          index.update(SearchableEntity._FullTextIndex(value))
+          index.update(SearchableEntity._FullTextIndex(
+              value, self._word_delimiter_regex))
 
     index_list = list(index)
     if index_list:
@@ -169,7 +173,7 @@
     return super(SearchableEntity, self)._ToPb()
 
   @classmethod
-  def _FullTextIndex(cls, text):
+  def _FullTextIndex(cls, text, word_delimiter_regex=None):
     """Returns a set of keywords appropriate for full text indexing.
 
     See SearchableQuery.Search() for details.
@@ -181,9 +185,12 @@
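+    Args:
+      text: the text to index, as a string.
+      word_delimiter_regex: optional compiled regex matching characters that
+        delimit words; defaults to the class-level punctuation regex.
+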
       set of strings
     """
 
+    if word_delimiter_regex is None:
+      word_delimiter_regex = cls._word_delimiter_regex
+
     if text:
       datastore_types.ValidateString(text, 'text', max_len=sys.maxint)
-      text = cls._PUNCTUATION_REGEX.sub(' ', text)
+      text = word_delimiter_regex.sub(' ', text)
       words = text.lower().split()
 
       words = set(unicode(w) for w in words)
@@ -206,7 +213,7 @@
   SearchableEntity or SearchableModel classes.
   """
 
-  def Search(self, search_query):
+  def Search(self, search_query, word_delimiter_regex=None):
     """Add a search query. This may be combined with filters.
 
     Note that keywords in the search query will be silently dropped if they
     are stop words or would otherwise not be indexed.
@@ -221,6 +228,7 @@
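+    Args:
+      search_query: a string of space-separated search keywords.
+      word_delimiter_regex: optional compiled regex matching characters that
+        delimit words in search_query.
+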
     """
     datastore_types.ValidateString(search_query, 'search query')
     self._search_query = search_query
+    self._word_delimiter_regex = word_delimiter_regex
     return self
 
   def _ToPb(self, limit=None, offset=None):
@@ -245,7 +253,8 @@
     pb = super(SearchableQuery, self)._ToPb(limit=limit, offset=offset)
 
     if hasattr(self, '_search_query'):
-      keywords = SearchableEntity._FullTextIndex(self._search_query)
+      keywords = SearchableEntity._FullTextIndex(
+          self._search_query, self._word_delimiter_regex)
       for keyword in keywords:
         filter = pb.add_filter()
         filter.set_op(datastore_pb.Query_Filter.EQUAL)
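
A query-side sketch of the new hook (kind, keywords, and regex hypothetical); entities must have been indexed with a matching delimiter for keywords to line up:

    import re
    from google.appengine.ext import search

    # treat only whitespace and commas as delimiters, keeping 'c++' intact
    delimiter = re.compile(r'[\s,]+')
    query = search.SearchableQuery('Story')
    query.Search('c++ tips', word_delimiter_regex=delimiter)
    for entity in query.Run():
        pass  # process matching entities
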
--- a/thirdparty/google_appengine/google/appengine/ext/webapp/__init__.py	Fri Oct 10 06:56:56 2008 +0000
+++ b/thirdparty/google_appengine/google/appengine/ext/webapp/__init__.py	Fri Oct 10 13:14:24 2008 +0000
@@ -70,6 +70,7 @@
 import wsgiref.util
 
 RE_FIND_GROUPS = re.compile('\(.*?\)')
+_CHARSET_RE = re.compile(r';\s*charset=([^;\s]*)', re.I)
 
 class Error(Exception):
   """Base of all exceptions in the webapp module."""
@@ -107,8 +108,10 @@
     Args:
       environ: A WSGI-compliant environment dictionary.
     """
-    charset = webob.NoDefault
-    if environ.get('CONTENT_TYPE', '').find('charset') == -1:
+    match = _CHARSET_RE.search(environ.get('CONTENT_TYPE', ''))
+    if match:
+      charset = match.group(1).lower()
+    else:
       charset = 'utf-8'
 
     webob.Request.__init__(self, environ, charset=charset,
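
The effect of the charset parsing above, shown on the regex itself (header values hypothetical):

    import re

    _CHARSET_RE = re.compile(r';\s*charset=([^;\s]*)', re.I)

    def charset_from_content_type(content_type):
        # mirrors the Request logic: an explicit charset wins, else utf-8
        match = _CHARSET_RE.search(content_type)
        if match:
            return match.group(1).lower()
        return 'utf-8'

    assert charset_from_content_type('text/html; Charset=ISO-8859-1') == 'iso-8859-1'
    assert charset_from_content_type('application/json') == 'utf-8'
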
--- a/thirdparty/google_appengine/google/appengine/tools/dev_appserver.py	Fri Oct 10 06:56:56 2008 +0000
+++ b/thirdparty/google_appengine/google/appengine/tools/dev_appserver.py	Fri Oct 10 13:14:24 2008 +0000
@@ -513,12 +513,19 @@
 
 
 def NotImplementedFake(*args, **kwargs):
-  """Fake for methods/classes that are not implemented in the production
+  """Fake for methods/functions that are not implemented in the production
   environment.
   """
   raise NotImplementedError("This class/method is not available.")
 
 
+class NotImplementedFakeClass(object):
+  """Fake class for classes that are not implemented in the production
+  environment.
+  """
+  __init__ = NotImplementedFake
+
+
 def IsEncodingsModule(module_name):
   """Determines if the supplied module is related to encodings in any way.
 
@@ -674,7 +681,7 @@
                       if os.path.isfile(filename))
 
   ALLOWED_DIRS = set([
-    os.path.normcase(os.path.abspath(os.path.dirname(os.__file__)))
+    os.path.normcase(os.path.realpath(os.path.dirname(os.__file__)))
   ])
 
   NOT_ALLOWED_DIRS = set([
@@ -899,6 +906,7 @@
     '_codecs_jp',
     '_codecs_kr',
     '_codecs_tw',
+    '_collections',
     '_csv',
     '_elementtree',
     '_functools',
@@ -1028,6 +1036,8 @@
       'AF_INET': None,
       'SOCK_STREAM': None,
       'SOCK_DGRAM': None,
+      '_GLOBAL_DEFAULT_TIMEOUT': getattr(socket, '_GLOBAL_DEFAULT_TIMEOUT',
+                                         None),
     },
 
     'tempfile': {
@@ -1525,8 +1535,7 @@
     depth_count += 1
 
   for index in xrange(depth_count):
-    current_init_file = os.path.abspath(
-        os.path.join(module_base, '__init__.py'))
+    current_init_file = os.path.join(module_base, '__init__.py')
 
     if not isfile(current_init_file):
       missing_init_files.append(current_init_file)
@@ -1750,7 +1759,7 @@
     __builtin__.open = FakeFile
     types.FileType = FakeFile
 
-    __builtin__.buffer = NotImplementedFake
+    __builtin__.buffer = NotImplementedFakeClass
 
     logging.debug('Executing CGI with env:\n%s', pprint.pformat(env))
     try:
@@ -1920,14 +1929,7 @@
       path = os.path.join(os.path.dirname(os.path.dirname(google.__file__)),
                           path[len(PYTHON_LIB_VAR) + 1:])
     else:
-      if os.path.sep == '\\':
-        root = self._root_path.replace('\\', '\\\\')
-        if root.endswith('\\'):
-          path = root + path
-        else:
-          path = root + '\\\\' + path
-      else:
-        path = os.path.join(self._root_path, path)
+      path = os.path.join(self._root_path, path)
 
     return path
 
@@ -1974,18 +1976,18 @@
           continue
 
         if handler_type == appinfo.STATIC_FILES:
-          regex = entry.upload
+          regex = entry.upload + '$'
         else:
-          static_dir = entry.static_dir
-          if static_dir[-1] == '/':
-            static_dir = static_dir[:-1]
-          regex = '/'.join((entry.static_dir, r'(.*)'))
-
-        adjusted_regex = r'^%s$' % path_adjuster.AdjustPath(regex)
+          path = entry.static_dir
+          if path[-1] == '/':
+            path = path[:-1]
+          regex = re.escape(path) + r'/(.*)'
+
         try:
-          path_re = re.compile(adjusted_regex)
+          path_re = re.compile(regex)
         except re.error, e:
-          raise InvalidAppConfigError('regex does not compile: %s' % e)
+          raise InvalidAppConfigError('regex %s does not compile: %s' %
+                                      (regex, e))
 
         if self._default_expiration is None:
           expiration = 0
@@ -2000,7 +2002,7 @@
     """Returns the mime type that we should use when serving the specified file.
 
     Args:
-      path: String containing the file's path on disk.
+      path: String containing the file's path relative to the app.
 
     Returns:
       String containing the mime type to use. Will be 'application/octet-stream'
@@ -2019,7 +2021,7 @@
     """Returns the cache expiration duration to be users for the given file.
 
     Args:
-      path: String containing the file's path on disk.
+      path: String containing the file's path relative to the app.
 
     Returns:
       Integer number of seconds to be used for browser cache expiration time.
@@ -2094,8 +2096,8 @@
     """Reads the file and returns the response status and data."""
     full_path = self._path_adjuster.AdjustPath(path)
     status, data = self._read_data_file(full_path)
-    content_type = self._static_file_config_matcher.GetMimeType(full_path)
-    expiration = self._static_file_config_matcher.GetExpiration(full_path)
+    content_type = self._static_file_config_matcher.GetMimeType(path)
+    expiration = self._static_file_config_matcher.GetExpiration(path)
 
     outfile.write('Status: %d\r\n' % status)
     outfile.write('Content-type: %s\r\n' % content_type)
@@ -2819,7 +2821,7 @@
   Returns:
     Instance of BaseHTTPServer.HTTPServer that's ready to start accepting.
   """
-  absolute_root_path = os.path.abspath(root_path)
+  absolute_root_path = os.path.realpath(root_path)
 
   SetupTemplates(template_dir)
   FakeFile.SetAllowedPaths([absolute_root_path,