Update Google App Engine from 1.2.5 to 1.2.7 in thirdparty folder.
--- a/thirdparty/google_appengine/RELEASE_NOTES Fri Oct 23 11:17:07 2009 -0700
+++ b/thirdparty/google_appengine/RELEASE_NOTES Fri Oct 23 13:54:11 2009 -0500
@@ -3,6 +3,35 @@
App Engine Python SDK - Release Notes
+Version 1.2.7 - October 14, 2009
+================================
+ - Changed the 'key' parameter to Model.__init__ to be keyword only.
+ - Fixed taskqueue import in Remote API.
+ http://code.google.com/p/googleappengine/issues/detail?id=2259
+
+
+Version 1.2.6 - September 17, 2009
+==================================
+ - Added incoming email support.
+ http://code.google.com/p/googleappengine/issues/detail?id=165
+ - Remote API now supports XMPP and task queues.
+ - The default for all handlers is now secure: optional. Users can
+ now access all pages via SSL unless explicitly disallowed.
+ - Remote API now supports HTTPS.
+ http://code.google.com/p/googleappengine/issues/detail?id=1461
+ - Appcfg now uses HTTPS by default.
+ http://code.google.com/p/googleappengine/issues/detail?id=794
+ - Appcfg.py now supports the --application and --version flags to
+ override the values specified in app.yaml.
+ http://code.google.com/p/googleappengine/issues/detail?id=670
+ - GQL now supports '= NULL' queries.
+ - The db.Model constructor now supports explicitly setting a key
+ (and thus an id) for a Model instance.
+ - New Datastore stats API. Stats are also visible in the admin console.
+ - Bulkloader dump and restore now support restoring to a different
+ app id and restoring numeric keys.
+
+
Version 1.2.5 - August 13, 2009
===============================
- The Windows Python SDK now includes a GUI launcher, similar to the Mac SDK.
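
(The 1.2.6 datastore items above map onto the public db API roughly as
in this minimal sketch, assuming a hypothetical Person model; the
explicit-key constructor is the 1.2.6 feature, and 1.2.7 makes 'key'
keyword-only.)

    from google.appengine.ext import db

    class Person(db.Model):              # hypothetical model
        name = db.StringProperty()

    # 1.2.6: construct an entity with an explicit key (and thus an id)
    key = db.Key.from_path('Person', 42)
    p = Person(key=key, name='Bob')      # 1.2.7: 'key' must be a keyword
    p.put()

    # 1.2.6: GQL can now match unset properties with '= NULL'
    unnamed = db.GqlQuery("SELECT * FROM Person WHERE name = NULL")
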
--- a/thirdparty/google_appengine/VERSION Fri Oct 23 11:17:07 2009 -0700
+++ b/thirdparty/google_appengine/VERSION Fri Oct 23 13:54:11 2009 -0500
@@ -1,3 +1,3 @@
-release: "1.2.5"
-timestamp: 1250206498
+release: "1.2.7"
+timestamp: 1255546941
api_versions: ['1']
--- a/thirdparty/google_appengine/google/appengine/api/apiproxy_stub_map.py Fri Oct 23 11:17:07 2009 -0700
+++ b/thirdparty/google_appengine/google/appengine/api/apiproxy_stub_map.py Fri Oct 23 13:54:11 2009 -0500
@@ -52,8 +52,8 @@
"""
stub = apiproxy.GetStub(service)
assert stub, 'No api proxy found for service "%s"' % service
- assert hasattr(stub, 'CreateRPC'), ('The service "%s" doesn\'t have ' +
- 'a CreateRPC method.' % service)
+ assert hasattr(stub, 'CreateRPC'), (('The service "%s" doesn\'t have ' +
+ 'a CreateRPC method.') % service)
return stub.CreateRPC()
@@ -153,19 +153,29 @@
self.__content = []
self.__unique_keys = set()
- def Call(self, service, call, request, response, rpc=None):
+ def Call(self, service, call, request, response, rpc=None, error=None):
"""Invokes all hooks in this collection.
+ NOTE: For backwards compatibility, if error is not None, hooks
+ with 4 or 5 arguments are *not* called. This situation (error is
+ not None) only occurs when the RPC request raised an exception;
+ in the past no hooks would be called at all in that case.
+
Args:
service: string representing which service to call
call: string representing which function to call
request: protocol buffer for the request
response: protocol buffer for the response
rpc: optional RPC used to make this call
+ error: optional Exception instance to be passed as the 6th argument
"""
for key, function, srv, num_args in self.__content:
if srv is None or srv == service:
- if num_args == 5:
+ if num_args == 6:
+ function(service, call, request, response, rpc, error)
+ elif error is not None:
+ pass
+ elif num_args == 5:
function(service, call, request, response, rpc)
else:
function(service, call, request, response)
@@ -250,14 +260,24 @@
if hasattr(stub, 'CreateRPC'):
rpc = stub.CreateRPC()
self.__precall_hooks.Call(service, call, request, response, rpc)
- rpc.MakeCall(service, call, request, response)
- rpc.Wait()
- rpc.CheckSuccess()
- self.__postcall_hooks.Call(service, call, request, response, rpc)
+ try:
+ rpc.MakeCall(service, call, request, response)
+ rpc.Wait()
+ rpc.CheckSuccess()
+ except Exception, err:
+ self.__postcall_hooks.Call(service, call, request, response, rpc, err)
+ raise
+ else:
+ self.__postcall_hooks.Call(service, call, request, response, rpc)
else:
self.__precall_hooks.Call(service, call, request, response)
- stub.MakeSyncCall(service, call, request, response)
- self.__postcall_hooks.Call(service, call, request, response)
+ try:
+ stub.MakeSyncCall(service, call, request, response)
+ except Exception, err:
+ self.__postcall_hooks.Call(service, call, request, response, None, err)
+ raise
+ else:
+ self.__postcall_hooks.Call(service, call, request, response)
class UserRPC(object):
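
(The dispatch above means a post-call hook that accepts a sixth 'error'
parameter is now invoked even when the RPC fails. A minimal sketch,
assuming the ListOfHooks.Append API defined in this module; the hook
name and body are illustrative.)

    from google.appengine.api import apiproxy_stub_map

    def audit_hook(service, call, request, response, rpc=None, error=None):
        # error is the exception instance on failure, None on success.
        # Hooks taking only 4 or 5 arguments are skipped when error is
        # not None, preserving the old behavior.
        if error is not None:
            print 'RPC %s.%s failed: %s' % (service, call, error)

    apiproxy_stub_map.apiproxy.GetPostCallHooks().Append(
        'audit_hook', audit_hook)
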
--- a/thirdparty/google_appengine/google/appengine/api/appinfo.py Fri Oct 23 11:17:07 2009 -0700
+++ b/thirdparty/google_appengine/google/appengine/api/appinfo.py Fri Oct 23 13:54:11 2009 -0500
@@ -71,6 +71,7 @@
SECURE_HTTP = 'never'
SECURE_HTTPS = 'always'
SECURE_HTTP_OR_HTTPS = 'optional'
+SECURE_DEFAULT = 'default'
REQUIRE_MATCHING_FILE = 'require_matching_file'
@@ -188,7 +189,8 @@
SECURE: validation.Options(SECURE_HTTP,
SECURE_HTTPS,
SECURE_HTTP_OR_HTTPS,
- default=SECURE_HTTP),
+ SECURE_DEFAULT,
+ default=SECURE_DEFAULT),
@@ -342,6 +344,19 @@
'Found more than %d URLMap entries in application configuration' %
MAX_URL_MAPS)
+ def FixSecureDefaults(self):
+ """Force omitted 'secure: ...' handler fields to 'secure: optional'.
+
+ The effect is that handler.secure is never equal to the (nominal)
+ default.
+
+ See http://b/issue?id=2073962.
+ """
+ if self.handlers:
+ for handler in self.handlers:
+ if handler.secure == SECURE_DEFAULT:
+ handler.secure = SECURE_HTTP_OR_HTTPS
+
def LoadSingleAppInfo(app_info):
"""Load a single AppInfo object where one and only one is expected.
@@ -370,6 +385,7 @@
raise appinfo_errors.EmptyConfigurationFile()
if len(app_infos) > 1:
raise appinfo_errors.MultipleConfigurationFile()
+ app_infos[0].FixSecureDefaults()
return app_infos[0]
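
(Net effect of the appinfo change, as a minimal sketch using this
module's validation classes; the application values are illustrative.)

    from google.appengine.api import appinfo

    handler = appinfo.URLMap(url='/.*', script='main.py')
    config = appinfo.AppInfoExternal(application='example-app',
                                     version='1',
                                     runtime='python',
                                     api_version='1',
                                     handlers=[handler])
    config.FixSecureDefaults()
    # An omitted 'secure' field defaults to SECURE_DEFAULT and is then
    # rewritten, so SSL access ends up allowed unless explicitly denied.
    assert config.handlers[0].secure == appinfo.SECURE_HTTP_OR_HTTPS
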
--- a/thirdparty/google_appengine/google/appengine/api/datastore.py Fri Oct 23 11:17:07 2009 -0700
+++ b/thirdparty/google_appengine/google/appengine/api/datastore.py Fri Oct 23 13:54:11 2009 -0500
@@ -42,6 +42,7 @@
from google.appengine.api import api_base_pb
from google.appengine.api import apiproxy_stub_map
+from google.appengine.api import capabilities
from google.appengine.api import datastore_errors
from google.appengine.api import datastore_types
from google.appengine.datastore import datastore_index
@@ -50,14 +51,23 @@
from google.appengine.datastore import entity_pb
try:
- from google.appengine.api.labs.taskqueue import taskqueue_service_pb
+ __import__('google.appengine.api.labs.taskqueue.taskqueue_service_pb')
+ taskqueue_service_pb = sys.modules.get(
+ 'google.appengine.api.labs.taskqueue.taskqueue_service_pb')
except ImportError:
from google.appengine.api.taskqueue import taskqueue_service_pb
MAX_ALLOWABLE_QUERIES = 30
+MAXIMUM_RESULTS = 1000
+
DEFAULT_TRANSACTION_RETRIES = 3
+READ_CAPABILITY = capabilities.CapabilitySet('datastore_v3')
+WRITE_CAPABILITY = capabilities.CapabilitySet(
+ 'datastore_v3',
+ capabilities=['write'])
+
_MAX_INDEXED_PROPERTIES = 5000
_MAX_ID_BATCH_SIZE = 1000 * 1000 * 1000
@@ -609,7 +619,8 @@
unindexed_properties = [p.name() for p in pb.raw_property_list()]
e = Entity(unicode(last_path.type().decode('utf-8')),
- unindexed_properties=unindexed_properties)
+ unindexed_properties=unindexed_properties,
+ _app=pb.key().app())
ref = e.__key._Key__reference
ref.CopyFrom(pb.key())
@@ -751,6 +762,9 @@
__cached_count = None
__hint = None
__ancestor = None
+ __compile = None
+
+ __cursor = None
__filter_order = None
__filter_counter = 0
@@ -759,7 +773,7 @@
__inequality_count = 0
def __init__(self, kind=None, filters={}, _app=None, keys_only=False,
- _namespace=None):
+ compile=True, cursor=None, _namespace=None):
"""Constructor.
Raises BadArgumentError if kind is not a string. Raises BadValueError or
@@ -784,6 +798,8 @@
self.__app = datastore_types.ResolveAppIdNamespace(_app,
_namespace).to_encoded()
self.__keys_only = keys_only
+ self.__compile = compile
+ self.__cursor = cursor
def Order(self, *orderings):
"""Specify how the query results should be sorted.
@@ -935,6 +951,14 @@
"""Returns True if this query is keys only, false otherwise."""
return self.__keys_only
+ def GetCompiledQuery(self):
+ try:
+ return self.__compiled_query
+ except AttributeError:
+ raise AssertionError('No cursor available; either this query has not '
+ 'been executed or there is no compilation '
+ 'available for this kind of query')
+
def Run(self):
"""Runs this query.
@@ -949,6 +973,7 @@
# an iterator that provides access to the query results
Iterator
"""
+ self.__compile = False
return self._Run()
def _Run(self, limit=None, offset=None,
@@ -963,18 +988,28 @@
"""
pb = self._ToPb(limit, offset, prefetch_count)
result = datastore_pb.QueryResult()
+ api_call = 'RunQuery'
+ if self.__cursor:
+ pb = self._ToCompiledPb(pb, self.__cursor, prefetch_count)
+ api_call = 'RunCompiledQuery'
try:
- apiproxy_stub_map.MakeSyncCall('datastore_v3', 'RunQuery', pb, result)
+ apiproxy_stub_map.MakeSyncCall('datastore_v3', api_call, pb, result)
except apiproxy_errors.ApplicationError, err:
try:
- _ToDatastoreError(err)
+ raise _ToDatastoreError(err)
except datastore_errors.NeedIndexError, exc:
yaml = datastore_index.IndexYamlForQuery(
*datastore_index.CompositeIndexForQuery(pb)[1:-1])
raise datastore_errors.NeedIndexError(
str(exc) + '\nThis query needs this index:\n' + yaml)
+ if self.__compile:
+ if result.has_compiled_query():
+ self.__compiled_query = result.compiled_query()
+ else:
+ self.__compiled_query = None
+
return Iterator(result, batch_size=next_count)
def Get(self, limit, offset=0):
@@ -1039,6 +1074,7 @@
Returns:
The number of results.
"""
+ self.__compile = False
if self.__cached_count:
return self.__cached_count
@@ -1228,6 +1264,8 @@
pb.set_keys_only(bool(self.__keys_only))
if self.__app:
pb.set_app(self.__app.encode('utf-8'))
+ if self.__compile:
+ pb.set_compile(True)
if limit is not None:
pb.set_limit(limit)
if offset is not None:
@@ -1273,6 +1311,13 @@
return pb
+ def _ToCompiledPb(self, query_pb, cursor, count=None):
+ compiled_pb = datastore_pb.RunCompiledQueryRequest()
+ compiled_pb.mutable_original_query().CopyFrom(query_pb)
+ compiled_pb.mutable_compiled_query().CopyFrom(cursor)
+ if count is not None:
+ compiled_pb.set_count(count)
+ return compiled_pb
def AllocateIds(model_key, size):
"""Allocates a range of IDs of size for the key defined by model_key
@@ -1298,9 +1343,12 @@
if size > _MAX_ID_BATCH_SIZE:
raise datastore_errors.BadArgumentError(
'Cannot allocate more than %s ids at a time' % _MAX_ID_BATCH_SIZE)
+ if size <= 0:
+ raise datastore_errors.BadArgumentError(
+ 'Cannot allocate less than 1 id')
req = datastore_pb.AllocateIdsRequest()
- req.mutable_model_key().CopyFrom(keys[0]._Key__reference)
+ req.mutable_model_key().CopyFrom(keys[0]._ToPb())
req.set_size(size)
resp = datastore_pb.AllocateIdsResponse()
@@ -1338,6 +1386,7 @@
self.__bound_queries = bound_queries
self.__orderings = orderings
+ self.__compile = False
def __str__(self):
res = 'MultiQuery: '
@@ -1452,11 +1501,11 @@
def __GetValueForId(self, sort_order_entity, identifier, sort_order):
value = _GetPropertyValue(sort_order_entity.__entity, identifier)
- entity_key = sort_order_entity.__entity.key()
- if (entity_key, identifier) in self.__min_max_value_cache:
- value = self.__min_max_value_cache[(entity_key, identifier)]
- elif isinstance(value, list):
- if sort_order == Query.DESCENDING:
+ if isinstance(value, list):
+ entity_key = sort_order_entity.__entity.key()
+ if (entity_key, identifier) in self.__min_max_value_cache:
+ value = self.__min_max_value_cache[(entity_key, identifier)]
+ elif sort_order == Query.DESCENDING:
value = min(value)
else:
value = max(value)
@@ -1562,6 +1611,10 @@
else:
return len(self.Get(limit))
+ def GetCompiledQuery(self):
+ raise AssertionError('No cursor available for a MultiQuery (queries '
+ 'using "IN" or "!=" operators)')
+
def __setitem__(self, query_filter, value):
"""Add a new filter by setting it on all subqueries.
@@ -1623,6 +1676,7 @@
return iter(self.__bound_queries)
+
class Iterator(object):
"""An iterator over the results of a datastore query.
@@ -1667,9 +1721,11 @@
# a list of entities or keys
[Entity or Key, ...]
"""
+ if count > MAXIMUM_RESULTS:
+ count = MAXIMUM_RESULTS
entity_list = self._Next(count)
while len(entity_list) < count and self.__more_results:
- next_results = self._Next(count - len(entity_list), self.__batch_size)
+ next_results = self._Next(count - len(entity_list))
if not next_results:
break
entity_list += next_results
@@ -2165,6 +2221,6 @@
}
if err.application_error in errors:
- raise errors[err.application_error](err.error_detail)
+ return errors[err.application_error](err.error_detail)
else:
- raise datastore_errors.Error(err.error_detail)
+ return datastore_errors.Error(err.error_detail)
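
(Taken together, the Query changes above add resumable queries: a
successful run may capture a compiled form that can be fed back in as a
cursor. A minimal sketch, assuming the backend supplies a compiled
query for this query shape; the kind is illustrative. Note that Run()
forces compile to False, so Get() is used here.)

    from google.appengine.api import datastore

    q = datastore.Query('Person')          # compile=True by default
    first_page = q.Get(10)
    cursor = q.GetCompiledQuery()          # AssertionError if unavailable

    q2 = datastore.Query('Person', cursor=cursor)
    second_page = q2.Get(10)               # resumes after the first page
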
--- a/thirdparty/google_appengine/google/appengine/api/datastore_file_stub.py Fri Oct 23 11:17:07 2009 -0700
+++ b/thirdparty/google_appengine/google/appengine/api/datastore_file_stub.py Fri Oct 23 13:54:11 2009 -0500
@@ -98,7 +98,6 @@
self.native = datastore.Entity._FromPb(entity)
-
class _Cursor(object):
"""A query cursor.
@@ -110,6 +109,7 @@
Class attributes:
_next_cursor: the next cursor to allocate
_next_cursor_lock: protects _next_cursor
+ _offset: the internal index for where we are in the results
"""
_next_cursor = 1
_next_cursor_lock = threading.Lock()
@@ -118,13 +118,15 @@
"""Constructor.
Args:
- # the query results, in order, such that pop(0) is the next result
+ # the query results, in order, such that results[self.offset:] is
+ # the next result
results: list of entity_pb.EntityProto
keys_only: integer
"""
self.__results = results
self.count = len(results)
self.keys_only = keys_only
+ self._offset = 0
self._next_cursor_lock.acquire()
try:
@@ -133,22 +135,54 @@
finally:
self._next_cursor_lock.release()
- def PopulateQueryResult(self, result, count):
+ def PopulateQueryResult(self, result, count, offset=None, compiled=False):
"""Populates a QueryResult with this cursor and the given number of results.
Args:
result: datastore_pb.QueryResult
count: integer
+ offset: integer, overrides the internal offset
+ compiled: boolean, whether we are compiling this query
"""
+ _offset = offset
+ if offset is None:
+ _offset = self._offset
+ if count > _MAXIMUM_RESULTS:
+ count = _MAXIMUM_RESULTS
+
result.mutable_cursor().set_cursor(self.cursor)
result.set_keys_only(self.keys_only)
- results_pbs = [r._ToPb() for r in self.__results[:count]]
+ self.count = len(self.__results[_offset:_offset + count])
+
+ results_pbs = [r._ToPb() for r in self.__results[_offset:_offset + count]]
result.result_list().extend(results_pbs)
- del self.__results[:count]
+
+ if offset is None:
+ self._offset += self.count
+
+ result.set_more_results(len(self.__results[_offset + self.count:]) > 0)
+ if compiled and result.more_results():
+ compiled_query = _FakeCompiledQuery(cursor=self.cursor,
+ offset=_offset + self.count,
+ keys_only=self.keys_only)
+ result.mutable_compiled_query().CopyFrom(compiled_query._ToPb())
+
- result.set_more_results(len(self.__results) > 0)
+class _FakeCompiledQuery(object):
+ def __init__(self, cursor, offset, keys_only=False):
+ self.cursor = cursor
+ self.offset = offset
+ self.keys_only = keys_only
+ def _ToPb(self):
+ compiled_pb = datastore_pb.CompiledQuery()
+ compiled_pb.mutable_primaryscan()
+ compiled_pb.set_keys_only(self.keys_only)
+
+ compiled_pb.set_limit(self.cursor)
+ compiled_pb.set_offset(self.offset)
+ return compiled_pb
class DatastoreFileStub(apiproxy_stub.APIProxyStub):
""" Persistent stub for the Python datastore API.
@@ -196,7 +230,7 @@
def __init__(self,
app_id,
datastore_file,
- history_file,
+ history_file=None,
require_indexes=False,
service_name='datastore_v3',
trusted=False):
@@ -208,8 +242,7 @@
app_id: string
datastore_file: string, stores all entities across sessions. Use None
not to use a file.
- history_file: string, stores query history. Use None as with
- datastore_file.
+ history_file: DEPRECATED. No-op.
require_indexes: bool, default False. If True, composite indexes must
exist in index.yaml for queries that need them.
service_name: Service name expected for all calls.
@@ -222,7 +255,6 @@
assert isinstance(app_id, basestring) and app_id != ''
self.__app_id = app_id
self.__datastore_file = datastore_file
- self.__history_file = history_file
self.SetTrusted(trusted)
self.__entities = {}
@@ -380,25 +412,11 @@
if last_path.has_id() and last_path.id() >= self.__next_id:
self.__next_id = last_path.id() + 1
- self.__query_history = {}
- for encoded_query, count in self.__ReadPickled(self.__history_file):
- try:
- query_pb = datastore_pb.Query(encoded_query)
- except self.READ_PB_EXCEPTIONS, e:
- raise datastore_errors.InternalError(self.READ_ERROR_MSG %
- (self.__history_file, e))
-
- if query_pb in self.__query_history:
- self.__query_history[query_pb] += count
- else:
- self.__query_history[query_pb] = count
-
def Write(self):
""" Writes out the datastore and history files. Be careful! If the files
already exist, this method overwrites them!
"""
self.__WriteDatastore()
- self.__WriteHistory()
def __WriteDatastore(self):
""" Writes out the datastore file. Be careful! If the file already exist,
@@ -412,16 +430,6 @@
self.__WritePickled(encoded, self.__datastore_file)
- def __WriteHistory(self):
- """ Writes out the history file. Be careful! If the file already exist,
- this method overwrites it!
- """
- if self.__history_file and self.__history_file != '/dev/null':
- encoded = [(query.Encode(), count)
- for query, count in self.__query_history.items()]
-
- self.__WritePickled(encoded, self.__history_file)
-
def __ReadPickled(self, filename):
"""Reads a pickled object from the given file and returns it.
"""
@@ -583,7 +591,7 @@
self.__entities_lock.release()
- def _Dynamic_RunQuery(self, query, query_result):
+ def _Dynamic_RunQuery(self, query, query_result, count=None):
if not self.__tx_lock.acquire(False):
if not query.has_ancestor():
raise apiproxy_errors.ApplicationError(
@@ -805,7 +813,7 @@
limit = query.limit()
if limit > _MAXIMUM_RESULTS:
limit = _MAXIMUM_RESULTS
- results = results[offset:limit + offset]
+ results = results[offset:]
clone = datastore_pb.Query()
clone.CopyFrom(query)
@@ -814,19 +822,35 @@
self.__query_history[clone] += 1
else:
self.__query_history[clone] = 1
- self.__WriteHistory()
cursor = _Cursor(results, query.keys_only())
self.__queries[cursor.cursor] = cursor
- if query.has_count():
- count = query.count()
- elif query.has_limit():
- count = query.limit()
- else:
- count = _BATCH_SIZE
+ if count is None:
+ if query.has_count():
+ count = query.count()
+ elif query.has_limit():
+ count = query.limit()
+ else:
+ count = _BATCH_SIZE
+
+ cursor.PopulateQueryResult(query_result, count, compiled=query.compile())
- cursor.PopulateQueryResult(query_result, count)
+ def _Dynamic_RunCompiledQuery(self, compiled_request, query_result):
+ cursor_handle = compiled_request.compiled_query().limit()
+ cursor_offset = compiled_request.compiled_query().offset()
+
+ try:
+ cursor = self.__queries[cursor_handle]
+ except KeyError:
+ raise apiproxy_errors.ApplicationError(
+ datastore_pb.Error.BAD_REQUEST, 'Cursor %d not found' % cursor_handle)
+
+ count = _BATCH_SIZE
+ if compiled_request.has_count():
+ count = compiled_request.count()
+ cursor.PopulateQueryResult(
+ query_result, count, cursor_offset, compiled=True)
def _Dynamic_Next(self, next_request, query_result):
cursor_handle = next_request.cursor().cursor()
@@ -845,7 +869,10 @@
def _Dynamic_Count(self, query, integer64proto):
self.__ValidateAppId(query.app())
query_result = datastore_pb.QueryResult()
- self._Dynamic_RunQuery(query, query_result)
+ count = _MAXIMUM_RESULTS
+ if query.has_limit():
+ count = query.limit()
+ self._Dynamic_RunQuery(query, query_result, count=count)
cursor = query_result.cursor().cursor()
integer64proto.set_value(self.__queries[cursor].count)
del self.__queries[cursor]
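
(With history_file deprecated, the stub can now be built without it. A
minimal sketch of standalone registration; the app id and file path are
illustrative.)

    from google.appengine.api import apiproxy_stub_map
    from google.appengine.api import datastore_file_stub

    stub = datastore_file_stub.DatastoreFileStub(
        'example-app', '/tmp/example.datastore')   # no history_file
    apiproxy_stub_map.apiproxy.RegisterStub('datastore_v3', stub)
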
--- a/thirdparty/google_appengine/google/appengine/api/datastore_types.py Fri Oct 23 11:17:07 2009 -0700
+++ b/thirdparty/google_appengine/google/appengine/api/datastore_types.py Fri Oct 23 13:54:11 2009 -0500
@@ -325,6 +325,7 @@
# a base64-encoded primary key, generated by Key.__str__
encoded: str
"""
+ self._str = None
if encoded is not None:
if not isinstance(encoded, basestring):
try:
@@ -339,10 +340,13 @@
if modulo != 0:
encoded += ('=' * (4 - modulo))
- encoded_pb = base64.urlsafe_b64decode(str(encoded))
+ self._str = str(encoded)
+ encoded_pb = base64.urlsafe_b64decode(self._str)
self.__reference = entity_pb.Reference(encoded_pb)
assert self.__reference.IsInitialized()
+ self._str = self._str.rstrip('=')
+
except (AssertionError, TypeError), e:
raise datastore_errors.BadKeyError(
'Invalid string key %s. Details: %s' % (encoded, e))
@@ -354,7 +358,7 @@
else:
self.__reference = entity_pb.Reference()
- def to_path(self):
+ def to_path(self, _default_id=None):
"""Construct the "path" of this key as a list.
Returns:
@@ -363,6 +367,7 @@
Raises:
datastore_errors.BadKeyError if this key does not have a valid path.
"""
+
path = []
for path_element in self.__reference.path().element_list():
path.append(path_element.type().decode('utf-8'))
@@ -370,6 +375,8 @@
path.append(path_element.name().decode('utf-8'))
elif path_element.has_id():
path.append(path_element.id())
+ elif _default_id is not None:
+ path.append(_default_id)
else:
raise datastore_errors.BadKeyError('Incomplete key found in to_path')
return path
@@ -631,12 +638,15 @@
Returns:
string
"""
- if (self.has_id_or_name()):
- encoded = base64.urlsafe_b64encode(self.__reference.Encode())
- return encoded.replace('=', '')
- else:
- raise datastore_errors.BadKeyError(
- 'Cannot string encode an incomplete key!\n%s' % self.__reference)
+ if self._str is None:
+ if (self.has_id_or_name()):
+ encoded = base64.urlsafe_b64encode(self.__reference.Encode())
+ self._str = encoded.replace('=', '')
+ else:
+ raise datastore_errors.BadKeyError(
+ 'Cannot string encode an incomplete key!\n%s' % self.__reference)
+ return self._str
+
def __repr__(self):
"""Returns an eval()able string representation of this key.
@@ -675,25 +685,11 @@
if not isinstance(other, Key):
return -2
- self_args = []
- other_args = []
-
- self_args.append(self.__reference.app())
- other_args.append(other.__reference.app())
+ self_args = [self.__reference.app()]
+ self_args += self.to_path(_default_id=0)
- for elem in self.__reference.path().element_list():
- self_args.append(elem.type())
- if elem.has_name():
- self_args.append(elem.name())
- else:
- self_args.append(elem.id())
-
- for elem in other.__reference.path().element_list():
- other_args.append(elem.type())
- if elem.has_name():
- other_args.append(elem.name())
- else:
- other_args.append(elem.id())
+ other_args = [other.__reference.app()]
+ other_args += other.to_path(_default_id=0)
for self_component, other_component in zip(self_args, other_args):
comparison = cmp(self_component, other_component)
@@ -711,7 +707,9 @@
Returns:
int
"""
- return hash(self.__str__())
+ args = self.to_path(_default_id=0)
+ args.append(self.__reference.app())
+ return hash(type(args)) ^ hash(tuple(args))
class Category(unicode):
@@ -900,12 +898,12 @@
def __init__(self, protocol, address=None):
if address is None:
try:
- split = protocol.split(' ')
+ split = protocol.split(' ', 1)
protocol, address = split
except (AttributeError, ValueError):
raise datastore_errors.BadValueError(
'Expected string of format "protocol address"; received %s' %
- str(protocol))
+ (protocol,))
ValidateString(address, 'address')
if protocol not in self.PROTOCOLS:
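
(The Key changes above cache the base64 string form and route ordering
and hashing through to_path(), with incomplete ids compared as 0. A
minimal round-trip sketch, assuming it runs inside a configured SDK
environment; the kind and id are illustrative.)

    from google.appengine.api.datastore_types import Key

    k = Key.from_path('Person', 42)
    s = str(k)            # encoded once, then cached in _str
    k2 = Key(s)           # decoding seeds the cache too
    assert k == k2 and hash(k) == hash(k2)
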
--- a/thirdparty/google_appengine/google/appengine/api/images/__init__.py Fri Oct 23 11:17:07 2009 -0700
+++ b/thirdparty/google_appengine/google/appengine/api/images/__init__.py Fri Oct 23 13:54:11 2009 -0500
@@ -85,22 +85,30 @@
"""The image data given is too large to process."""
+class InvalidBlobKeyError(Error):
+ """The provided blob key was invalid."""
+
+
class Image(object):
"""Image object to manipulate."""
- def __init__(self, image_data):
+ def __init__(self, image_data=None, blob_key=None):
"""Constructor.
Args:
image_data: str, image data in string form.
+ blob_key: str, image data as a blobstore blob key.
Raises:
NotImageError if the given data is empty.
"""
- if not image_data:
+ if not image_data and not blob_key:
raise NotImageError("Empty image data.")
+ if image_data and blob_key:
+ raise NotImageError("Can only take one image or blob key.")
self._image_data = image_data
+ self._blob_key = blob_key
self._transforms = []
self._width = None
self._height = None
@@ -123,6 +131,8 @@
NotImageError if the image data is not an image.
BadImageError if the image data is corrupt.
"""
+ if not self._image_data:
+ raise NotImageError("Dimensions unavailable for blob key input")
size = len(self._image_data)
if size >= 6 and self._image_data.startswith("GIF"):
self._update_gif_dimensions()
@@ -448,6 +458,18 @@
self._transforms.append(transform)
+ def _set_imagedata(self, imagedata):
+ """Fills in an ImageData PB from this Image instance.
+
+ Args:
+ imagedata: An ImageData PB instance
+ """
+ if self._blob_key:
+ imagedata.set_content("")
+ imagedata.set_blob_key(self._blob_key)
+ else:
+ imagedata.set_content(self._image_data)
+
def execute_transforms(self, output_encoding=PNG):
"""Perform transformations on given image.
@@ -463,6 +485,7 @@
NotImageError when the image data given is not an image.
BadImageError when the image data given is corrupt.
LargeImageError when the image data given is too large to process.
+ InvalidBlobKeyError when the blob key provided is invalid.
TransformationError when something goes wrong during image manipulation.
Error when something unknown, but bad, happens.
"""
@@ -476,7 +499,7 @@
request = images_service_pb.ImagesTransformRequest()
response = images_service_pb.ImagesTransformResponse()
- request.mutable_image().set_content(self._image_data)
+ self._set_imagedata(request.mutable_image())
for transform in self._transforms:
request.add_transform().CopyFrom(transform)
@@ -502,12 +525,16 @@
images_service_pb.ImagesServiceError.IMAGE_TOO_LARGE):
raise LargeImageError()
elif (e.application_error ==
+ images_service_pb.ImagesServiceError.INVALID_BLOB_KEY):
+ raise InvalidBlobKeyError()
+ elif (e.application_error ==
images_service_pb.ImagesServiceError.UNSPECIFIED_ERROR):
raise TransformationError()
else:
raise Error()
self._image_data = response.image().content()
+ self._blob_key = None
self._transforms = []
self._width = None
self._height = None
@@ -545,7 +572,8 @@
request = images_service_pb.ImagesHistogramRequest()
response = images_service_pb.ImagesHistogramResponse()
- request.mutable_image().set_content(self._image_data)
+ self._set_imagedata(request.mutable_image())
+
try:
apiproxy_stub_map.MakeSyncCall("images",
"Histogram",
@@ -561,6 +589,9 @@
elif (e.application_error ==
images_service_pb.ImagesServiceError.IMAGE_TOO_LARGE):
raise LargeImageError()
+ elif (e.application_error ==
+ images_service_pb.ImagesServiceError.INVALID_BLOB_KEY):
+ raise InvalidBlobKeyError()
else:
raise Error()
histogram = response.histogram()
@@ -768,7 +799,11 @@
(anchor, ANCHOR_TYPES))
if image not in image_map:
image_map[image] = request.image_size()
- request.add_image().set_content(image)
+
+ if isinstance(image, Image):
+ image._set_imagedata(request.add_image())
+ else:
+ request.add_image().set_content(image)
option = request.add_options()
option.set_x_offset(x)
@@ -801,6 +836,9 @@
images_service_pb.ImagesServiceError.IMAGE_TOO_LARGE):
raise LargeImageError()
elif (e.application_error ==
+ images_service_pb.ImagesServiceError.INVALID_BLOB_KEY):
+ raise InvalidBlobKeyError()
+ elif (e.application_error ==
images_service_pb.ImagesServiceError.UNSPECIFIED_ERROR):
raise TransformationError()
else:
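
(With the blob-key plumbing above, an Image can be backed by blobstore
data instead of raw bytes. A minimal sketch; the blob key value is
illustrative, and exactly one of image_data or blob_key may be given.)

    from google.appengine.api import images

    img = images.Image(blob_key='example-blob-key')
    img.resize(width=200)                  # queue a transform
    png = img.execute_transforms(output_encoding=images.PNG)
    # Note: width/height are unavailable for blob-keyed images until
    # execute_transforms() replaces _image_data with real bytes.
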
--- a/thirdparty/google_appengine/google/appengine/api/images/images_service_pb.py Fri Oct 23 11:17:07 2009 -0700
+++ b/thirdparty/google_appengine/google/appengine/api/images/images_service_pb.py Fri Oct 23 13:54:11 2009 -0500
@@ -29,6 +29,7 @@
NOT_IMAGE = 3
BAD_IMAGE_DATA = 4
IMAGE_TOO_LARGE = 5
+ INVALID_BLOB_KEY = 6
_ErrorCode_NAMES = {
1: "UNSPECIFIED_ERROR",
@@ -36,6 +37,7 @@
3: "NOT_IMAGE",
4: "BAD_IMAGE_DATA",
5: "IMAGE_TOO_LARGE",
+ 6: "INVALID_BLOB_KEY",
}
def ErrorCode_Name(cls, x): return cls._ErrorCode_NAMES.get(x, "")
@@ -522,6 +524,8 @@
class ImageData(ProtocolBuffer.ProtocolMessage):
has_content_ = 0
content_ = ""
+ has_blob_key_ = 0
+ blob_key_ = ""
def __init__(self, contents=None):
if contents is not None: self.MergeFromString(contents)
@@ -539,15 +543,31 @@
def has_content(self): return self.has_content_
+ def blob_key(self): return self.blob_key_
+
+ def set_blob_key(self, x):
+ self.has_blob_key_ = 1
+ self.blob_key_ = x
+
+ def clear_blob_key(self):
+ if self.has_blob_key_:
+ self.has_blob_key_ = 0
+ self.blob_key_ = ""
+
+ def has_blob_key(self): return self.has_blob_key_
+
def MergeFrom(self, x):
assert x is not self
if (x.has_content()): self.set_content(x.content())
+ if (x.has_blob_key()): self.set_blob_key(x.blob_key())
def Equals(self, x):
if x is self: return 1
if self.has_content_ != x.has_content_: return 0
if self.has_content_ and self.content_ != x.content_: return 0
+ if self.has_blob_key_ != x.has_blob_key_: return 0
+ if self.has_blob_key_ and self.blob_key_ != x.blob_key_: return 0
return 1
def IsInitialized(self, debug_strs=None):
@@ -561,14 +581,19 @@
def ByteSize(self):
n = 0
n += self.lengthString(len(self.content_))
+ if (self.has_blob_key_): n += 1 + self.lengthString(len(self.blob_key_))
return n + 1
def Clear(self):
self.clear_content()
+ self.clear_blob_key()
def OutputUnchecked(self, out):
out.putVarInt32(10)
out.putPrefixedString(self.content_)
+ if (self.has_blob_key_):
+ out.putVarInt32(18)
+ out.putPrefixedString(self.blob_key_)
def TryMerge(self, d):
while d.avail() > 0:
@@ -576,6 +601,9 @@
if tt == 10:
self.set_content(d.getPrefixedString())
continue
+ if tt == 18:
+ self.set_blob_key(d.getPrefixedString())
+ continue
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
@@ -583,6 +611,7 @@
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_content_: res+=prefix+("content: %s\n" % self.DebugFormatString(self.content_))
+ if self.has_blob_key_: res+=prefix+("blob_key: %s\n" % self.DebugFormatString(self.blob_key_))
return res
@@ -590,16 +619,19 @@
return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
kcontent = 1
+ kblob_key = 2
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
1: "content",
- }, 1)
+ 2: "blob_key",
+ }, 2)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
1: ProtocolBuffer.Encoder.STRING,
- }, 1, ProtocolBuffer.Encoder.MAX_TYPE)
+ 2: ProtocolBuffer.Encoder.STRING,
+ }, 2, ProtocolBuffer.Encoder.MAX_TYPE)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
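
(The generated blob_key accessors follow the standard ProtocolBuffer
pattern; for instance, with an illustrative value:)

    from google.appengine.api.images import images_service_pb

    data = images_service_pb.ImageData()
    data.set_content('')                   # content stays required
    data.set_blob_key('example-blob-key')  # new optional field, tag 2
    assert data.has_blob_key()
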
--- a/thirdparty/google_appengine/google/appengine/api/labs/taskqueue/taskqueue.py Fri Oct 23 11:17:07 2009 -0700
+++ b/thirdparty/google_appengine/google/appengine/api/labs/taskqueue/taskqueue.py Fri Oct 23 13:54:11 2009 -0500
@@ -109,6 +109,10 @@
"""The requested operation is not allowed for this app."""
+class DatastoreError(Error):
+ """There was a datastore error while accessing the queue."""
+
+
MAX_QUEUE_NAME_LENGTH = 100
MAX_TASK_NAME_LENGTH = 500
@@ -480,7 +484,7 @@
"""
return self.__enqueued
- def add(self, queue_name=_DEFAULT_QUEUE, transactional=True):
+ def add(self, queue_name=_DEFAULT_QUEUE, transactional=False):
"""Adds this Task to a queue. See Queue.add."""
return Queue(queue_name).add(self, transactional=transactional)
@@ -504,7 +508,7 @@
self.__name = name
self.__url = '%s/%s' % (_DEFAULT_QUEUE_PATH, self.__name)
- def add(self, task, transactional=True):
+ def add(self, task, transactional=False):
"""Adds a Task to this Queue.
Args:
@@ -551,6 +555,9 @@
from google.appengine.api import datastore
datastore._MaybeSetupTransaction(request, [])
+ if request.has_transaction() and task.name:
+ raise InvalidTaskNameError('Task bound to a transaction cannot be named.')
+
call_tuple = ('taskqueue', 'Add', request, response)
apiproxy_stub_map.apiproxy.GetPreCallHooks().Call(*call_tuple)
try:
@@ -616,6 +623,22 @@
elif (error.application_error ==
taskqueue_service_pb.TaskQueueServiceError.INVALID_ETA):
raise InvalidTaskError(error.error_detail)
+ elif ((error.application_error >=
+ taskqueue_service_pb.TaskQueueServiceError.DATASTORE_ERROR) and
+ isinstance(error.application_error, int)):
+ from google.appengine.api import datastore
+ error.application_error = (error.application_error -
+ taskqueue_service_pb.TaskQueueServiceError.DATASTORE_ERROR)
+ datastore_exception = datastore._ToDatastoreError(error)
+
+ class JointException(datastore_exception.__class__, DatastoreError):
+ """There was a datastore error while accessing the queue."""
+ __msg = (u'taskqueue.DatastoreError caused by: %s %s' %
+ (datastore_exception.__class__, error.error_detail))
+ def __str__(self):
+ return JointException.__msg
+
+ raise JointException
else:
raise Error('Application error %s: %s' %
(error.application_error, error.error_detail))
@@ -630,4 +653,5 @@
Returns:
The Task that was added to the queue.
"""
- return Task(*args, **kwargs).add()
+ transactional = kwargs.pop('transactional', False)
+ return Task(*args, **kwargs).add(transactional=transactional)
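
(Because 'transactional' now defaults to False, joining an open
datastore transaction is opt-in. A minimal sketch; the handler URL is
illustrative, and per the new check a transactional task cannot be
named.)

    from google.appengine.api import datastore
    from google.appengine.api.labs import taskqueue

    def txn():
        # Enqueued only if the surrounding transaction commits; passing
        # name= here would raise InvalidTaskNameError.
        taskqueue.add(url='/work', transactional=True)

    datastore.RunInTransaction(txn)
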
--- a/thirdparty/google_appengine/google/appengine/api/labs/taskqueue/taskqueue_service_pb.py Fri Oct 23 11:17:07 2009 -0700
+++ b/thirdparty/google_appengine/google/appengine/api/labs/taskqueue/taskqueue_service_pb.py Fri Oct 23 13:54:11 2009 -0500
@@ -39,6 +39,8 @@
TOMBSTONED_TASK = 11
INVALID_ETA = 12
INVALID_REQUEST = 13
+ UNKNOWN_TASK = 14
+ DATASTORE_ERROR = 10000
_ErrorCode_NAMES = {
0: "OK",
@@ -55,6 +57,8 @@
11: "TOMBSTONED_TASK",
12: "INVALID_ETA",
13: "INVALID_REQUEST",
+ 14: "UNKNOWN_TASK",
+ 10000: "DATASTORE_ERROR",
}
def ErrorCode_Name(cls, x): return cls._ErrorCode_NAMES.get(x, "")
@@ -646,6 +650,247 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+class TaskQueueDeleteRequest(ProtocolBuffer.ProtocolMessage):
+ has_queue_name_ = 0
+ queue_name_ = ""
+ has_app_id_ = 0
+ app_id_ = ""
+
+ def __init__(self, contents=None):
+ self.task_name_ = []
+ if contents is not None: self.MergeFromString(contents)
+
+ def queue_name(self): return self.queue_name_
+
+ def set_queue_name(self, x):
+ self.has_queue_name_ = 1
+ self.queue_name_ = x
+
+ def clear_queue_name(self):
+ if self.has_queue_name_:
+ self.has_queue_name_ = 0
+ self.queue_name_ = ""
+
+ def has_queue_name(self): return self.has_queue_name_
+
+ def task_name_size(self): return len(self.task_name_)
+ def task_name_list(self): return self.task_name_
+
+ def task_name(self, i):
+ return self.task_name_[i]
+
+ def set_task_name(self, i, x):
+ self.task_name_[i] = x
+
+ def add_task_name(self, x):
+ self.task_name_.append(x)
+
+ def clear_task_name(self):
+ self.task_name_ = []
+
+ def app_id(self): return self.app_id_
+
+ def set_app_id(self, x):
+ self.has_app_id_ = 1
+ self.app_id_ = x
+
+ def clear_app_id(self):
+ if self.has_app_id_:
+ self.has_app_id_ = 0
+ self.app_id_ = ""
+
+ def has_app_id(self): return self.has_app_id_
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+ if (x.has_queue_name()): self.set_queue_name(x.queue_name())
+ for i in xrange(x.task_name_size()): self.add_task_name(x.task_name(i))
+ if (x.has_app_id()): self.set_app_id(x.app_id())
+
+ def Equals(self, x):
+ if x is self: return 1
+ if self.has_queue_name_ != x.has_queue_name_: return 0
+ if self.has_queue_name_ and self.queue_name_ != x.queue_name_: return 0
+ if len(self.task_name_) != len(x.task_name_): return 0
+ for e1, e2 in zip(self.task_name_, x.task_name_):
+ if e1 != e2: return 0
+ if self.has_app_id_ != x.has_app_id_: return 0
+ if self.has_app_id_ and self.app_id_ != x.app_id_: return 0
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ if (not self.has_queue_name_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: queue_name not set.')
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ n += self.lengthString(len(self.queue_name_))
+ n += 1 * len(self.task_name_)
+ for i in xrange(len(self.task_name_)): n += self.lengthString(len(self.task_name_[i]))
+ if (self.has_app_id_): n += 1 + self.lengthString(len(self.app_id_))
+ return n + 1
+
+ def Clear(self):
+ self.clear_queue_name()
+ self.clear_task_name()
+ self.clear_app_id()
+
+ def OutputUnchecked(self, out):
+ out.putVarInt32(10)
+ out.putPrefixedString(self.queue_name_)
+ for i in xrange(len(self.task_name_)):
+ out.putVarInt32(18)
+ out.putPrefixedString(self.task_name_[i])
+ if (self.has_app_id_):
+ out.putVarInt32(26)
+ out.putPrefixedString(self.app_id_)
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+ if tt == 10:
+ self.set_queue_name(d.getPrefixedString())
+ continue
+ if tt == 18:
+ self.add_task_name(d.getPrefixedString())
+ continue
+ if tt == 26:
+ self.set_app_id(d.getPrefixedString())
+ continue
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ if self.has_queue_name_: res+=prefix+("queue_name: %s\n" % self.DebugFormatString(self.queue_name_))
+ cnt=0
+ for e in self.task_name_:
+ elm=""
+ if printElemNumber: elm="(%d)" % cnt
+ res+=prefix+("task_name%s: %s\n" % (elm, self.DebugFormatString(e)))
+ cnt+=1
+ if self.has_app_id_: res+=prefix+("app_id: %s\n" % self.DebugFormatString(self.app_id_))
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+ kqueue_name = 1
+ ktask_name = 2
+ kapp_id = 3
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ 1: "queue_name",
+ 2: "task_name",
+ 3: "app_id",
+ }, 3)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ 1: ProtocolBuffer.Encoder.STRING,
+ 2: ProtocolBuffer.Encoder.STRING,
+ 3: ProtocolBuffer.Encoder.STRING,
+ }, 3, ProtocolBuffer.Encoder.MAX_TYPE)
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
+class TaskQueueDeleteResponse(ProtocolBuffer.ProtocolMessage):
+
+ def __init__(self, contents=None):
+ self.result_ = []
+ if contents is not None: self.MergeFromString(contents)
+
+ def result_size(self): return len(self.result_)
+ def result_list(self): return self.result_
+
+ def result(self, i):
+ return self.result_[i]
+
+ def set_result(self, i, x):
+ self.result_[i] = x
+
+ def add_result(self, x):
+ self.result_.append(x)
+
+ def clear_result(self):
+ self.result_ = []
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+ for i in xrange(x.result_size()): self.add_result(x.result(i))
+
+ def Equals(self, x):
+ if x is self: return 1
+ if len(self.result_) != len(x.result_): return 0
+ for e1, e2 in zip(self.result_, x.result_):
+ if e1 != e2: return 0
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ n += 1 * len(self.result_)
+ for i in xrange(len(self.result_)): n += self.lengthVarInt64(self.result_[i])
+ return n + 0
+
+ def Clear(self):
+ self.clear_result()
+
+ def OutputUnchecked(self, out):
+ for i in xrange(len(self.result_)):
+ out.putVarInt32(24)
+ out.putVarInt32(self.result_[i])
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+ if tt == 24:
+ self.add_result(d.getVarInt32())
+ continue
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ cnt=0
+ for e in self.result_:
+ elm=""
+ if printElemNumber: elm="(%d)" % cnt
+ res+=prefix+("result%s: %s\n" % (elm, self.DebugFormatInt32(e)))
+ cnt+=1
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+ kresult = 3
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ 3: "result",
+ }, 3)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ 3: ProtocolBuffer.Encoder.NUMERIC,
+ }, 3, ProtocolBuffer.Encoder.MAX_TYPE)
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
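
(The delete messages above follow the same generated pattern as the
existing requests; constructing one looks roughly like this, with
illustrative names. Presumably the matching RPC is 'Delete' on the
'taskqueue' service, with one result code per task name.)

    from google.appengine.api.labs.taskqueue import taskqueue_service_pb

    req = taskqueue_service_pb.TaskQueueDeleteRequest()
    req.set_queue_name('default')
    req.add_task_name('example-task-1')
    req.add_task_name('example-task-2')
    resp = taskqueue_service_pb.TaskQueueDeleteResponse()
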
class TaskQueueUpdateQueueRequest(ProtocolBuffer.ProtocolMessage):
has_app_id_ = 0
app_id_ = ""
@@ -1443,13 +1688,165 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+class TaskQueueScannerQueueInfo(ProtocolBuffer.ProtocolMessage):
+ has_executed_last_minute_ = 0
+ executed_last_minute_ = 0
+ has_executed_last_hour_ = 0
+ executed_last_hour_ = 0
+ has_sampling_duration_seconds_ = 0
+ sampling_duration_seconds_ = 0.0
+
+ def __init__(self, contents=None):
+ if contents is not None: self.MergeFromString(contents)
+
+ def executed_last_minute(self): return self.executed_last_minute_
+
+ def set_executed_last_minute(self, x):
+ self.has_executed_last_minute_ = 1
+ self.executed_last_minute_ = x
+
+ def clear_executed_last_minute(self):
+ if self.has_executed_last_minute_:
+ self.has_executed_last_minute_ = 0
+ self.executed_last_minute_ = 0
+
+ def has_executed_last_minute(self): return self.has_executed_last_minute_
+
+ def executed_last_hour(self): return self.executed_last_hour_
+
+ def set_executed_last_hour(self, x):
+ self.has_executed_last_hour_ = 1
+ self.executed_last_hour_ = x
+
+ def clear_executed_last_hour(self):
+ if self.has_executed_last_hour_:
+ self.has_executed_last_hour_ = 0
+ self.executed_last_hour_ = 0
+
+ def has_executed_last_hour(self): return self.has_executed_last_hour_
+
+ def sampling_duration_seconds(self): return self.sampling_duration_seconds_
+
+ def set_sampling_duration_seconds(self, x):
+ self.has_sampling_duration_seconds_ = 1
+ self.sampling_duration_seconds_ = x
+
+ def clear_sampling_duration_seconds(self):
+ if self.has_sampling_duration_seconds_:
+ self.has_sampling_duration_seconds_ = 0
+ self.sampling_duration_seconds_ = 0.0
+
+ def has_sampling_duration_seconds(self): return self.has_sampling_duration_seconds_
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+ if (x.has_executed_last_minute()): self.set_executed_last_minute(x.executed_last_minute())
+ if (x.has_executed_last_hour()): self.set_executed_last_hour(x.executed_last_hour())
+ if (x.has_sampling_duration_seconds()): self.set_sampling_duration_seconds(x.sampling_duration_seconds())
+
+ def Equals(self, x):
+ if x is self: return 1
+ if self.has_executed_last_minute_ != x.has_executed_last_minute_: return 0
+ if self.has_executed_last_minute_ and self.executed_last_minute_ != x.executed_last_minute_: return 0
+ if self.has_executed_last_hour_ != x.has_executed_last_hour_: return 0
+ if self.has_executed_last_hour_ and self.executed_last_hour_ != x.executed_last_hour_: return 0
+ if self.has_sampling_duration_seconds_ != x.has_sampling_duration_seconds_: return 0
+ if self.has_sampling_duration_seconds_ and self.sampling_duration_seconds_ != x.sampling_duration_seconds_: return 0
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ if (not self.has_executed_last_minute_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: executed_last_minute not set.')
+ if (not self.has_executed_last_hour_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: executed_last_hour not set.')
+ if (not self.has_sampling_duration_seconds_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: sampling_duration_seconds not set.')
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ n += self.lengthVarInt64(self.executed_last_minute_)
+ n += self.lengthVarInt64(self.executed_last_hour_)
+ return n + 11
+
+ def Clear(self):
+ self.clear_executed_last_minute()
+ self.clear_executed_last_hour()
+ self.clear_sampling_duration_seconds()
+
+ def OutputUnchecked(self, out):
+ out.putVarInt32(8)
+ out.putVarInt64(self.executed_last_minute_)
+ out.putVarInt32(16)
+ out.putVarInt64(self.executed_last_hour_)
+ out.putVarInt32(25)
+ out.putDouble(self.sampling_duration_seconds_)
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+ if tt == 8:
+ self.set_executed_last_minute(d.getVarInt64())
+ continue
+ if tt == 16:
+ self.set_executed_last_hour(d.getVarInt64())
+ continue
+ if tt == 25:
+ self.set_sampling_duration_seconds(d.getDouble())
+ continue
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ if self.has_executed_last_minute_: res+=prefix+("executed_last_minute: %s\n" % self.DebugFormatInt64(self.executed_last_minute_))
+ if self.has_executed_last_hour_: res+=prefix+("executed_last_hour: %s\n" % self.DebugFormatInt64(self.executed_last_hour_))
+ if self.has_sampling_duration_seconds_: res+=prefix+("sampling_duration_seconds: %s\n" % self.DebugFormat(self.sampling_duration_seconds_))
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+ kexecuted_last_minute = 1
+ kexecuted_last_hour = 2
+ ksampling_duration_seconds = 3
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ 1: "executed_last_minute",
+ 2: "executed_last_hour",
+ 3: "sampling_duration_seconds",
+ }, 3)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ 1: ProtocolBuffer.Encoder.NUMERIC,
+ 2: ProtocolBuffer.Encoder.NUMERIC,
+ 3: ProtocolBuffer.Encoder.DOUBLE,
+ }, 3, ProtocolBuffer.Encoder.MAX_TYPE)
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
class TaskQueueFetchQueueStatsResponse_QueueStats(ProtocolBuffer.ProtocolMessage):
has_num_tasks_ = 0
num_tasks_ = 0
has_oldest_eta_usec_ = 0
oldest_eta_usec_ = 0
+ has_scanner_info_ = 0
+ scanner_info_ = None
def __init__(self, contents=None):
+ self.lazy_init_lock_ = thread.allocate_lock()
if contents is not None: self.MergeFromString(contents)
def num_tasks(self): return self.num_tasks_
@@ -1478,11 +1875,30 @@
def has_oldest_eta_usec(self): return self.has_oldest_eta_usec_
+ def scanner_info(self):
+ if self.scanner_info_ is None:
+ self.lazy_init_lock_.acquire()
+ try:
+ if self.scanner_info_ is None: self.scanner_info_ = TaskQueueScannerQueueInfo()
+ finally:
+ self.lazy_init_lock_.release()
+ return self.scanner_info_
+
+ def mutable_scanner_info(self): self.has_scanner_info_ = 1; return self.scanner_info()
+
+ def clear_scanner_info(self):
+ if self.has_scanner_info_:
+ self.has_scanner_info_ = 0;
+ if self.scanner_info_ is not None: self.scanner_info_.Clear()
+
+ def has_scanner_info(self): return self.has_scanner_info_
+
def MergeFrom(self, x):
assert x is not self
if (x.has_num_tasks()): self.set_num_tasks(x.num_tasks())
if (x.has_oldest_eta_usec()): self.set_oldest_eta_usec(x.oldest_eta_usec())
+ if (x.has_scanner_info()): self.mutable_scanner_info().MergeFrom(x.scanner_info())
def Equals(self, x):
if x is self: return 1
@@ -1490,6 +1906,8 @@
if self.has_num_tasks_ and self.num_tasks_ != x.num_tasks_: return 0
if self.has_oldest_eta_usec_ != x.has_oldest_eta_usec_: return 0
if self.has_oldest_eta_usec_ and self.oldest_eta_usec_ != x.oldest_eta_usec_: return 0
+ if self.has_scanner_info_ != x.has_scanner_info_: return 0
+ if self.has_scanner_info_ and self.scanner_info_ != x.scanner_info_: return 0
return 1
def IsInitialized(self, debug_strs=None):
@@ -1502,23 +1920,30 @@
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: oldest_eta_usec not set.')
+ if (self.has_scanner_info_ and not self.scanner_info_.IsInitialized(debug_strs)): initialized = 0
return initialized
def ByteSize(self):
n = 0
n += self.lengthVarInt64(self.num_tasks_)
n += self.lengthVarInt64(self.oldest_eta_usec_)
+ if (self.has_scanner_info_): n += 1 + self.lengthString(self.scanner_info_.ByteSize())
return n + 2
def Clear(self):
self.clear_num_tasks()
self.clear_oldest_eta_usec()
+ self.clear_scanner_info()
def OutputUnchecked(self, out):
out.putVarInt32(16)
out.putVarInt32(self.num_tasks_)
out.putVarInt32(24)
out.putVarInt64(self.oldest_eta_usec_)
+ if (self.has_scanner_info_):
+ out.putVarInt32(34)
+ out.putVarInt32(self.scanner_info_.ByteSize())
+ self.scanner_info_.OutputUnchecked(out)
def TryMerge(self, d):
while 1:
@@ -1530,6 +1955,12 @@
if tt == 24:
self.set_oldest_eta_usec(d.getVarInt64())
continue
+ if tt == 34:
+ length = d.getVarInt32()
+ tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
+ d.skip(length)
+ self.mutable_scanner_info().TryMerge(tmp)
+ continue
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
@@ -1538,6 +1969,10 @@
res=""
if self.has_num_tasks_: res+=prefix+("num_tasks: %s\n" % self.DebugFormatInt32(self.num_tasks_))
if self.has_oldest_eta_usec_: res+=prefix+("oldest_eta_usec: %s\n" % self.DebugFormatInt64(self.oldest_eta_usec_))
+ if self.has_scanner_info_:
+ res+=prefix+"scanner_info <\n"
+ res+=self.scanner_info_.__str__(prefix + " ", printElemNumber)
+ res+=prefix+">\n"
return res
class TaskQueueFetchQueueStatsResponse(ProtocolBuffer.ProtocolMessage):
@@ -1624,22 +2059,922 @@
kQueueStatsGroup = 1
kQueueStatsnum_tasks = 2
kQueueStatsoldest_eta_usec = 3
+ kQueueStatsscanner_info = 4
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
1: "QueueStats",
2: "num_tasks",
3: "oldest_eta_usec",
- }, 3)
+ 4: "scanner_info",
+ }, 4)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
1: ProtocolBuffer.Encoder.STARTGROUP,
2: ProtocolBuffer.Encoder.NUMERIC,
3: ProtocolBuffer.Encoder.NUMERIC,
- }, 3, ProtocolBuffer.Encoder.MAX_TYPE)
+ 4: ProtocolBuffer.Encoder.STRING,
+ }, 4, ProtocolBuffer.Encoder.MAX_TYPE)
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
+class TaskQueueDeleteQueueRequest(ProtocolBuffer.ProtocolMessage):
+ has_app_id_ = 0
+ app_id_ = ""
+ has_queue_name_ = 0
+ queue_name_ = ""
+
+ def __init__(self, contents=None):
+ if contents is not None: self.MergeFromString(contents)
+
+ def app_id(self): return self.app_id_
+
+ def set_app_id(self, x):
+ self.has_app_id_ = 1
+ self.app_id_ = x
+
+ def clear_app_id(self):
+ if self.has_app_id_:
+ self.has_app_id_ = 0
+ self.app_id_ = ""
+
+ def has_app_id(self): return self.has_app_id_
+
+ def queue_name(self): return self.queue_name_
+
+ def set_queue_name(self, x):
+ self.has_queue_name_ = 1
+ self.queue_name_ = x
+
+ def clear_queue_name(self):
+ if self.has_queue_name_:
+ self.has_queue_name_ = 0
+ self.queue_name_ = ""
+
+ def has_queue_name(self): return self.has_queue_name_
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+ if (x.has_app_id()): self.set_app_id(x.app_id())
+ if (x.has_queue_name()): self.set_queue_name(x.queue_name())
+
+ def Equals(self, x):
+ if x is self: return 1
+ if self.has_app_id_ != x.has_app_id_: return 0
+ if self.has_app_id_ and self.app_id_ != x.app_id_: return 0
+ if self.has_queue_name_ != x.has_queue_name_: return 0
+ if self.has_queue_name_ and self.queue_name_ != x.queue_name_: return 0
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ if (not self.has_app_id_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: app_id not set.')
+ if (not self.has_queue_name_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: queue_name not set.')
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ n += self.lengthString(len(self.app_id_))
+ n += self.lengthString(len(self.queue_name_))
+ return n + 2
+
+ def Clear(self):
+ self.clear_app_id()
+ self.clear_queue_name()
+
+ def OutputUnchecked(self, out):
+ out.putVarInt32(10)
+ out.putPrefixedString(self.app_id_)
+ out.putVarInt32(18)
+ out.putPrefixedString(self.queue_name_)
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+ if tt == 10:
+ self.set_app_id(d.getPrefixedString())
+ continue
+ if tt == 18:
+ self.set_queue_name(d.getPrefixedString())
+ continue
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ if self.has_app_id_: res+=prefix+("app_id: %s\n" % self.DebugFormatString(self.app_id_))
+ if self.has_queue_name_: res+=prefix+("queue_name: %s\n" % self.DebugFormatString(self.queue_name_))
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+ kapp_id = 1
+ kqueue_name = 2
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ 1: "app_id",
+ 2: "queue_name",
+ }, 2)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ 1: ProtocolBuffer.Encoder.STRING,
+ 2: ProtocolBuffer.Encoder.STRING,
+ }, 2, ProtocolBuffer.Encoder.MAX_TYPE)
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
+class TaskQueueDeleteQueueResponse(ProtocolBuffer.ProtocolMessage):
+
+ def __init__(self, contents=None):
+ pass
+ if contents is not None: self.MergeFromString(contents)
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+
+ def Equals(self, x):
+ if x is self: return 1
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ return n + 0
+
+ def Clear(self):
+ pass
+
+ def OutputUnchecked(self, out):
+ pass
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ }, 0)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ }, 0, ProtocolBuffer.Encoder.MAX_TYPE)
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
+class TaskQueueQueryTasksRequest(ProtocolBuffer.ProtocolMessage):
+ has_app_id_ = 0
+ app_id_ = ""
+ has_queue_name_ = 0
+ queue_name_ = ""
+ has_start_task_name_ = 0
+ start_task_name_ = ""
+ has_start_eta_usec_ = 0
+ start_eta_usec_ = 0
+ has_max_rows_ = 0
+ max_rows_ = 1
+
+ def __init__(self, contents=None):
+ if contents is not None: self.MergeFromString(contents)
+
+ def app_id(self): return self.app_id_
+
+ def set_app_id(self, x):
+ self.has_app_id_ = 1
+ self.app_id_ = x
+
+ def clear_app_id(self):
+ if self.has_app_id_:
+ self.has_app_id_ = 0
+ self.app_id_ = ""
+
+ def has_app_id(self): return self.has_app_id_
+
+ def queue_name(self): return self.queue_name_
+
+ def set_queue_name(self, x):
+ self.has_queue_name_ = 1
+ self.queue_name_ = x
+
+ def clear_queue_name(self):
+ if self.has_queue_name_:
+ self.has_queue_name_ = 0
+ self.queue_name_ = ""
+
+ def has_queue_name(self): return self.has_queue_name_
+
+ def start_task_name(self): return self.start_task_name_
+
+ def set_start_task_name(self, x):
+ self.has_start_task_name_ = 1
+ self.start_task_name_ = x
+
+ def clear_start_task_name(self):
+ if self.has_start_task_name_:
+ self.has_start_task_name_ = 0
+ self.start_task_name_ = ""
+
+ def has_start_task_name(self): return self.has_start_task_name_
+
+ def start_eta_usec(self): return self.start_eta_usec_
+
+ def set_start_eta_usec(self, x):
+ self.has_start_eta_usec_ = 1
+ self.start_eta_usec_ = x
+
+ def clear_start_eta_usec(self):
+ if self.has_start_eta_usec_:
+ self.has_start_eta_usec_ = 0
+ self.start_eta_usec_ = 0
+
+ def has_start_eta_usec(self): return self.has_start_eta_usec_
+
+ def max_rows(self): return self.max_rows_
+
+ def set_max_rows(self, x):
+ self.has_max_rows_ = 1
+ self.max_rows_ = x
+
+ def clear_max_rows(self):
+ if self.has_max_rows_:
+ self.has_max_rows_ = 0
+ self.max_rows_ = 1
+
+ def has_max_rows(self): return self.has_max_rows_
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+ if (x.has_app_id()): self.set_app_id(x.app_id())
+ if (x.has_queue_name()): self.set_queue_name(x.queue_name())
+ if (x.has_start_task_name()): self.set_start_task_name(x.start_task_name())
+ if (x.has_start_eta_usec()): self.set_start_eta_usec(x.start_eta_usec())
+ if (x.has_max_rows()): self.set_max_rows(x.max_rows())
+
+ def Equals(self, x):
+ if x is self: return 1
+ if self.has_app_id_ != x.has_app_id_: return 0
+ if self.has_app_id_ and self.app_id_ != x.app_id_: return 0
+ if self.has_queue_name_ != x.has_queue_name_: return 0
+ if self.has_queue_name_ and self.queue_name_ != x.queue_name_: return 0
+ if self.has_start_task_name_ != x.has_start_task_name_: return 0
+ if self.has_start_task_name_ and self.start_task_name_ != x.start_task_name_: return 0
+ if self.has_start_eta_usec_ != x.has_start_eta_usec_: return 0
+ if self.has_start_eta_usec_ and self.start_eta_usec_ != x.start_eta_usec_: return 0
+ if self.has_max_rows_ != x.has_max_rows_: return 0
+ if self.has_max_rows_ and self.max_rows_ != x.max_rows_: return 0
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ if (not self.has_app_id_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: app_id not set.')
+ if (not self.has_queue_name_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: queue_name not set.')
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ n += self.lengthString(len(self.app_id_))
+ n += self.lengthString(len(self.queue_name_))
+ if (self.has_start_task_name_): n += 1 + self.lengthString(len(self.start_task_name_))
+ if (self.has_start_eta_usec_): n += 1 + self.lengthVarInt64(self.start_eta_usec_)
+ if (self.has_max_rows_): n += 1 + self.lengthVarInt64(self.max_rows_)
+ return n + 2
+
+ def Clear(self):
+ self.clear_app_id()
+ self.clear_queue_name()
+ self.clear_start_task_name()
+ self.clear_start_eta_usec()
+ self.clear_max_rows()
+
+ def OutputUnchecked(self, out):
+ out.putVarInt32(10)
+ out.putPrefixedString(self.app_id_)
+ out.putVarInt32(18)
+ out.putPrefixedString(self.queue_name_)
+ if (self.has_start_task_name_):
+ out.putVarInt32(26)
+ out.putPrefixedString(self.start_task_name_)
+ if (self.has_start_eta_usec_):
+ out.putVarInt32(32)
+ out.putVarInt64(self.start_eta_usec_)
+ if (self.has_max_rows_):
+ out.putVarInt32(40)
+ out.putVarInt32(self.max_rows_)
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+ if tt == 10:
+ self.set_app_id(d.getPrefixedString())
+ continue
+ if tt == 18:
+ self.set_queue_name(d.getPrefixedString())
+ continue
+ if tt == 26:
+ self.set_start_task_name(d.getPrefixedString())
+ continue
+ if tt == 32:
+ self.set_start_eta_usec(d.getVarInt64())
+ continue
+ if tt == 40:
+ self.set_max_rows(d.getVarInt32())
+ continue
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ if self.has_app_id_: res+=prefix+("app_id: %s\n" % self.DebugFormatString(self.app_id_))
+ if self.has_queue_name_: res+=prefix+("queue_name: %s\n" % self.DebugFormatString(self.queue_name_))
+ if self.has_start_task_name_: res+=prefix+("start_task_name: %s\n" % self.DebugFormatString(self.start_task_name_))
+ if self.has_start_eta_usec_: res+=prefix+("start_eta_usec: %s\n" % self.DebugFormatInt64(self.start_eta_usec_))
+ if self.has_max_rows_: res+=prefix+("max_rows: %s\n" % self.DebugFormatInt32(self.max_rows_))
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+ kapp_id = 1
+ kqueue_name = 2
+ kstart_task_name = 3
+ kstart_eta_usec = 4
+ kmax_rows = 5
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ 1: "app_id",
+ 2: "queue_name",
+ 3: "start_task_name",
+ 4: "start_eta_usec",
+ 5: "max_rows",
+ }, 5)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ 1: ProtocolBuffer.Encoder.STRING,
+ 2: ProtocolBuffer.Encoder.STRING,
+ 3: ProtocolBuffer.Encoder.STRING,
+ 4: ProtocolBuffer.Encoder.NUMERIC,
+ 5: ProtocolBuffer.Encoder.NUMERIC,
+ }, 5, ProtocolBuffer.Encoder.MAX_TYPE)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
-
-__all__ = ['TaskQueueServiceError','TaskQueueAddRequest','TaskQueueAddRequest_Header','TaskQueueAddResponse','TaskQueueUpdateQueueRequest','TaskQueueUpdateQueueResponse','TaskQueueFetchQueuesRequest','TaskQueueFetchQueuesResponse','TaskQueueFetchQueuesResponse_Queue','TaskQueueFetchQueueStatsRequest','TaskQueueFetchQueueStatsResponse','TaskQueueFetchQueueStatsResponse_QueueStats']
+class TaskQueueQueryTasksResponse_TaskHeader(ProtocolBuffer.ProtocolMessage):
+ has_key_ = 0
+ key_ = ""
+ has_value_ = 0
+ value_ = ""
+
+ def __init__(self, contents=None):
+ if contents is not None: self.MergeFromString(contents)
+
+ def key(self): return self.key_
+
+ def set_key(self, x):
+ self.has_key_ = 1
+ self.key_ = x
+
+ def clear_key(self):
+ if self.has_key_:
+ self.has_key_ = 0
+ self.key_ = ""
+
+ def has_key(self): return self.has_key_
+
+ def value(self): return self.value_
+
+ def set_value(self, x):
+ self.has_value_ = 1
+ self.value_ = x
+
+ def clear_value(self):
+ if self.has_value_:
+ self.has_value_ = 0
+ self.value_ = ""
+
+ def has_value(self): return self.has_value_
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+ if (x.has_key()): self.set_key(x.key())
+ if (x.has_value()): self.set_value(x.value())
+
+ def Equals(self, x):
+ if x is self: return 1
+ if self.has_key_ != x.has_key_: return 0
+ if self.has_key_ and self.key_ != x.key_: return 0
+ if self.has_value_ != x.has_value_: return 0
+ if self.has_value_ and self.value_ != x.value_: return 0
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ if (not self.has_key_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: key not set.')
+ if (not self.has_value_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: value not set.')
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ n += self.lengthString(len(self.key_))
+ n += self.lengthString(len(self.value_))
+ return n + 2
+
+ def Clear(self):
+ self.clear_key()
+ self.clear_value()
+
+ def OutputUnchecked(self, out):
+ out.putVarInt32(66)
+ out.putPrefixedString(self.key_)
+ out.putVarInt32(74)
+ out.putPrefixedString(self.value_)
+
+ def TryMerge(self, d):
+ while 1:
+ tt = d.getVarInt32()
+ if tt == 60: break
+ if tt == 66:
+ self.set_key(d.getPrefixedString())
+ continue
+ if tt == 74:
+ self.set_value(d.getPrefixedString())
+ continue
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ if self.has_key_: res+=prefix+("key: %s\n" % self.DebugFormatString(self.key_))
+ if self.has_value_: res+=prefix+("value: %s\n" % self.DebugFormatString(self.value_))
+ return res
+
+class TaskQueueQueryTasksResponse_Task(ProtocolBuffer.ProtocolMessage):
+
+ GET = 1
+ POST = 2
+ HEAD = 3
+ PUT = 4
+ DELETE = 5
+
+ _RequestMethod_NAMES = {
+ 1: "GET",
+ 2: "POST",
+ 3: "HEAD",
+ 4: "PUT",
+ 5: "DELETE",
+ }
+
+ def RequestMethod_Name(cls, x): return cls._RequestMethod_NAMES.get(x, "")
+ RequestMethod_Name = classmethod(RequestMethod_Name)
+
+ has_task_name_ = 0
+ task_name_ = ""
+ has_eta_usec_ = 0
+ eta_usec_ = 0
+ has_url_ = 0
+ url_ = ""
+ has_method_ = 0
+ method_ = 0
+ has_retry_count_ = 0
+ retry_count_ = 0
+ has_body_size_ = 0
+ body_size_ = 0
+ has_body_ = 0
+ body_ = ""
+
+ def __init__(self, contents=None):
+ self.header_ = []
+ if contents is not None: self.MergeFromString(contents)
+
+ def task_name(self): return self.task_name_
+
+ def set_task_name(self, x):
+ self.has_task_name_ = 1
+ self.task_name_ = x
+
+ def clear_task_name(self):
+ if self.has_task_name_:
+ self.has_task_name_ = 0
+ self.task_name_ = ""
+
+ def has_task_name(self): return self.has_task_name_
+
+ def eta_usec(self): return self.eta_usec_
+
+ def set_eta_usec(self, x):
+ self.has_eta_usec_ = 1
+ self.eta_usec_ = x
+
+ def clear_eta_usec(self):
+ if self.has_eta_usec_:
+ self.has_eta_usec_ = 0
+ self.eta_usec_ = 0
+
+ def has_eta_usec(self): return self.has_eta_usec_
+
+ def url(self): return self.url_
+
+ def set_url(self, x):
+ self.has_url_ = 1
+ self.url_ = x
+
+ def clear_url(self):
+ if self.has_url_:
+ self.has_url_ = 0
+ self.url_ = ""
+
+ def has_url(self): return self.has_url_
+
+ def method(self): return self.method_
+
+ def set_method(self, x):
+ self.has_method_ = 1
+ self.method_ = x
+
+ def clear_method(self):
+ if self.has_method_:
+ self.has_method_ = 0
+ self.method_ = 0
+
+ def has_method(self): return self.has_method_
+
+ def retry_count(self): return self.retry_count_
+
+ def set_retry_count(self, x):
+ self.has_retry_count_ = 1
+ self.retry_count_ = x
+
+ def clear_retry_count(self):
+ if self.has_retry_count_:
+ self.has_retry_count_ = 0
+ self.retry_count_ = 0
+
+ def has_retry_count(self): return self.has_retry_count_
+
+ def header_size(self): return len(self.header_)
+ def header_list(self): return self.header_
+
+ def header(self, i):
+ return self.header_[i]
+
+ def mutable_header(self, i):
+ return self.header_[i]
+
+ def add_header(self):
+ x = TaskQueueQueryTasksResponse_TaskHeader()
+ self.header_.append(x)
+ return x
+
+ def clear_header(self):
+ self.header_ = []
+ def body_size(self): return self.body_size_
+
+ def set_body_size(self, x):
+ self.has_body_size_ = 1
+ self.body_size_ = x
+
+ def clear_body_size(self):
+ if self.has_body_size_:
+ self.has_body_size_ = 0
+ self.body_size_ = 0
+
+ def has_body_size(self): return self.has_body_size_
+
+ def body(self): return self.body_
+
+ def set_body(self, x):
+ self.has_body_ = 1
+ self.body_ = x
+
+ def clear_body(self):
+ if self.has_body_:
+ self.has_body_ = 0
+ self.body_ = ""
+
+ def has_body(self): return self.has_body_
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+ if (x.has_task_name()): self.set_task_name(x.task_name())
+ if (x.has_eta_usec()): self.set_eta_usec(x.eta_usec())
+ if (x.has_url()): self.set_url(x.url())
+ if (x.has_method()): self.set_method(x.method())
+ if (x.has_retry_count()): self.set_retry_count(x.retry_count())
+ for i in xrange(x.header_size()): self.add_header().CopyFrom(x.header(i))
+ if (x.has_body_size()): self.set_body_size(x.body_size())
+ if (x.has_body()): self.set_body(x.body())
+
+ def Equals(self, x):
+ if x is self: return 1
+ if self.has_task_name_ != x.has_task_name_: return 0
+ if self.has_task_name_ and self.task_name_ != x.task_name_: return 0
+ if self.has_eta_usec_ != x.has_eta_usec_: return 0
+ if self.has_eta_usec_ and self.eta_usec_ != x.eta_usec_: return 0
+ if self.has_url_ != x.has_url_: return 0
+ if self.has_url_ and self.url_ != x.url_: return 0
+ if self.has_method_ != x.has_method_: return 0
+ if self.has_method_ and self.method_ != x.method_: return 0
+ if self.has_retry_count_ != x.has_retry_count_: return 0
+ if self.has_retry_count_ and self.retry_count_ != x.retry_count_: return 0
+ if len(self.header_) != len(x.header_): return 0
+ for e1, e2 in zip(self.header_, x.header_):
+ if e1 != e2: return 0
+ if self.has_body_size_ != x.has_body_size_: return 0
+ if self.has_body_size_ and self.body_size_ != x.body_size_: return 0
+ if self.has_body_ != x.has_body_: return 0
+ if self.has_body_ and self.body_ != x.body_: return 0
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ if (not self.has_task_name_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: task_name not set.')
+ if (not self.has_eta_usec_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: eta_usec not set.')
+ if (not self.has_url_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: url not set.')
+ if (not self.has_method_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: method not set.')
+ for p in self.header_:
+ if not p.IsInitialized(debug_strs): initialized=0
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ n += self.lengthString(len(self.task_name_))
+ n += self.lengthVarInt64(self.eta_usec_)
+ n += self.lengthString(len(self.url_))
+ n += self.lengthVarInt64(self.method_)
+ if (self.has_retry_count_): n += 1 + self.lengthVarInt64(self.retry_count_)
+ n += 2 * len(self.header_)
+ for i in xrange(len(self.header_)): n += self.header_[i].ByteSize()
+ if (self.has_body_size_): n += 1 + self.lengthVarInt64(self.body_size_)
+ if (self.has_body_): n += 1 + self.lengthString(len(self.body_))
+ return n + 4
+
+ def Clear(self):
+ self.clear_task_name()
+ self.clear_eta_usec()
+ self.clear_url()
+ self.clear_method()
+ self.clear_retry_count()
+ self.clear_header()
+ self.clear_body_size()
+ self.clear_body()
+
+ def OutputUnchecked(self, out):
+ out.putVarInt32(18)
+ out.putPrefixedString(self.task_name_)
+ out.putVarInt32(24)
+ out.putVarInt64(self.eta_usec_)
+ out.putVarInt32(34)
+ out.putPrefixedString(self.url_)
+ out.putVarInt32(40)
+ out.putVarInt32(self.method_)
+ if (self.has_retry_count_):
+ out.putVarInt32(48)
+ out.putVarInt32(self.retry_count_)
+ for i in xrange(len(self.header_)):
+ out.putVarInt32(59)
+ self.header_[i].OutputUnchecked(out)
+ out.putVarInt32(60)
+ if (self.has_body_size_):
+ out.putVarInt32(80)
+ out.putVarInt32(self.body_size_)
+ if (self.has_body_):
+ out.putVarInt32(90)
+ out.putPrefixedString(self.body_)
+
+ def TryMerge(self, d):
+ while 1:
+ tt = d.getVarInt32()
+ if tt == 12: break
+ if tt == 18:
+ self.set_task_name(d.getPrefixedString())
+ continue
+ if tt == 24:
+ self.set_eta_usec(d.getVarInt64())
+ continue
+ if tt == 34:
+ self.set_url(d.getPrefixedString())
+ continue
+ if tt == 40:
+ self.set_method(d.getVarInt32())
+ continue
+ if tt == 48:
+ self.set_retry_count(d.getVarInt32())
+ continue
+ if tt == 59:
+ self.add_header().TryMerge(d)
+ continue
+ if tt == 80:
+ self.set_body_size(d.getVarInt32())
+ continue
+ if tt == 90:
+ self.set_body(d.getPrefixedString())
+ continue
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ if self.has_task_name_: res+=prefix+("task_name: %s\n" % self.DebugFormatString(self.task_name_))
+ if self.has_eta_usec_: res+=prefix+("eta_usec: %s\n" % self.DebugFormatInt64(self.eta_usec_))
+ if self.has_url_: res+=prefix+("url: %s\n" % self.DebugFormatString(self.url_))
+ if self.has_method_: res+=prefix+("method: %s\n" % self.DebugFormatInt32(self.method_))
+ if self.has_retry_count_: res+=prefix+("retry_count: %s\n" % self.DebugFormatInt32(self.retry_count_))
+ cnt=0
+ for e in self.header_:
+ elm=""
+ if printElemNumber: elm="(%d)" % cnt
+ res+=prefix+("Header%s {\n" % elm)
+ res+=e.__str__(prefix + " ", printElemNumber)
+ res+=prefix+"}\n"
+ cnt+=1
+ if self.has_body_size_: res+=prefix+("body_size: %s\n" % self.DebugFormatInt32(self.body_size_))
+ if self.has_body_: res+=prefix+("body: %s\n" % self.DebugFormatString(self.body_))
+ return res
+
+class TaskQueueQueryTasksResponse(ProtocolBuffer.ProtocolMessage):
+
+ def __init__(self, contents=None):
+ self.task_ = []
+ if contents is not None: self.MergeFromString(contents)
+
+ def task_size(self): return len(self.task_)
+ def task_list(self): return self.task_
+
+ def task(self, i):
+ return self.task_[i]
+
+ def mutable_task(self, i):
+ return self.task_[i]
+
+ def add_task(self):
+ x = TaskQueueQueryTasksResponse_Task()
+ self.task_.append(x)
+ return x
+
+ def clear_task(self):
+ self.task_ = []
+
+ def MergeFrom(self, x):
+ assert x is not self
+ for i in xrange(x.task_size()): self.add_task().CopyFrom(x.task(i))
+
+ def Equals(self, x):
+ if x is self: return 1
+ if len(self.task_) != len(x.task_): return 0
+ for e1, e2 in zip(self.task_, x.task_):
+ if e1 != e2: return 0
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ for p in self.task_:
+ if not p.IsInitialized(debug_strs): initialized=0
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ n += 2 * len(self.task_)
+ for i in xrange(len(self.task_)): n += self.task_[i].ByteSize()
+ return n + 0
+
+ def Clear(self):
+ self.clear_task()
+
+ def OutputUnchecked(self, out):
+ for i in xrange(len(self.task_)):
+ out.putVarInt32(11)
+ self.task_[i].OutputUnchecked(out)
+ out.putVarInt32(12)
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+ if tt == 11:
+ self.add_task().TryMerge(d)
+ continue
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ cnt=0
+ for e in self.task_:
+ elm=""
+ if printElemNumber: elm="(%d)" % cnt
+ res+=prefix+("Task%s {\n" % elm)
+ res+=e.__str__(prefix + " ", printElemNumber)
+ res+=prefix+"}\n"
+ cnt+=1
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+ kTaskGroup = 1
+ kTasktask_name = 2
+ kTasketa_usec = 3
+ kTaskurl = 4
+ kTaskmethod = 5
+ kTaskretry_count = 6
+ kTaskHeaderGroup = 7
+ kTaskHeaderkey = 8
+ kTaskHeadervalue = 9
+ kTaskbody_size = 10
+ kTaskbody = 11
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ 1: "Task",
+ 2: "task_name",
+ 3: "eta_usec",
+ 4: "url",
+ 5: "method",
+ 6: "retry_count",
+ 7: "Header",
+ 8: "key",
+ 9: "value",
+ 10: "body_size",
+ 11: "body",
+ }, 11)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ 1: ProtocolBuffer.Encoder.STARTGROUP,
+ 2: ProtocolBuffer.Encoder.STRING,
+ 3: ProtocolBuffer.Encoder.NUMERIC,
+ 4: ProtocolBuffer.Encoder.STRING,
+ 5: ProtocolBuffer.Encoder.NUMERIC,
+ 6: ProtocolBuffer.Encoder.NUMERIC,
+ 7: ProtocolBuffer.Encoder.STARTGROUP,
+ 8: ProtocolBuffer.Encoder.STRING,
+ 9: ProtocolBuffer.Encoder.STRING,
+ 10: ProtocolBuffer.Encoder.NUMERIC,
+ 11: ProtocolBuffer.Encoder.STRING,
+ }, 11, ProtocolBuffer.Encoder.MAX_TYPE)
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
+
+__all__ = ['TaskQueueServiceError','TaskQueueAddRequest','TaskQueueAddRequest_Header','TaskQueueAddResponse','TaskQueueDeleteRequest','TaskQueueDeleteResponse','TaskQueueUpdateQueueRequest','TaskQueueUpdateQueueResponse','TaskQueueFetchQueuesRequest','TaskQueueFetchQueuesResponse','TaskQueueFetchQueuesResponse_Queue','TaskQueueFetchQueueStatsRequest','TaskQueueScannerQueueInfo','TaskQueueFetchQueueStatsResponse','TaskQueueFetchQueueStatsResponse_QueueStats','TaskQueueDeleteQueueRequest','TaskQueueDeleteQueueResponse','TaskQueueQueryTasksRequest','TaskQueueQueryTasksResponse','TaskQueueQueryTasksResponse_TaskHeader','TaskQueueQueryTasksResponse_Task']
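All of the generated message classes above share the same accessor surface (set_*/has_*/clear_* methods, MergeFromString, Encode). A minimal sketch of exercising TaskQueueQueryTasksRequest; the field values are illustrative:

    from google.appengine.api.labs.taskqueue import taskqueue_service_pb

    request = taskqueue_service_pb.TaskQueueQueryTasksRequest()
    request.set_app_id('my-app')        # required
    request.set_queue_name('default')   # required
    request.set_max_rows(10)            # optional; defaults to 1
    assert request.IsInitialized()

    # Round-trip through the wire encoding.
    decoded = taskqueue_service_pb.TaskQueueQueryTasksRequest(request.Encode())
    assert decoded.queue_name() == 'default'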
--- a/thirdparty/google_appengine/google/appengine/api/labs/taskqueue/taskqueue_stub.py Fri Oct 23 11:17:07 2009 -0700
+++ b/thirdparty/google_appengine/google/appengine/api/labs/taskqueue/taskqueue_stub.py Fri Oct 23 13:54:11 2009 -0500
@@ -30,6 +30,8 @@
import base64
import datetime
import os
+import random
+import time
import taskqueue_service_pb
@@ -129,6 +131,8 @@
self._next_task_id = 1
self._root_path = root_path
+ self._app_queues = {}
+
def _Dynamic_Add(self, request, response):
"""Local implementation of the Add RPC in TaskQueueService.
@@ -200,6 +204,7 @@
'oldest_task': '2009/02/02 05:37:42',
'eta_delta': '0:00:06.342511 ago',
'tasks_in_queue': 12}, ...]
+ The list of queues always includes the default queue.
"""
queues = []
queue_info = self.queue_yaml_parser(self._root_path)
@@ -325,3 +330,67 @@
queue_name: the name of the queue to remove tasks from.
"""
self._taskqueues[queue_name] = []
+
+ def _Dynamic_UpdateQueue(self, request, unused_response):
+ """Local implementation of the UpdateQueue RPC in TaskQueueService.
+
+ Must adhere to the '_Dynamic_' naming convention for stubbing to work.
+ See taskqueue_service.proto for a full description of the RPC.
+
+ Args:
+ request: A taskqueue_service_pb.TaskQueueUpdateQueueRequest.
+ unused_response: A taskqueue_service_pb.TaskQueueUpdateQueueResponse.
+ Not used.
+ """
+ queues = self._app_queues.setdefault(request.app_id(), {})
+ defensive_copy = taskqueue_service_pb.TaskQueueUpdateQueueRequest()
+ defensive_copy.CopyFrom(request)
+ queues[request.queue_name()] = defensive_copy
+
+ def _Dynamic_FetchQueues(self, request, response):
+ """Local implementation of the FetchQueues RPC in TaskQueueService.
+
+ Must adhere to the '_Dynamic_' naming convention for stubbing to work.
+ See taskqueue_service.proto for a full description of the RPC.
+
+ Args:
+ request: A taskqueue_service_pb.TaskQueueFetchQueuesRequest.
+ response: A taskqueue_service_pb.TaskQueueFetchQueuesResponse.
+ """
+ queues = self._app_queues.get(request.app_id(), {})
+    for unused_key, queue in sorted(queues.items())[:request.max_rows()]:
+ response_queue = response.add_queue()
+ response_queue.set_queue_name(queue.queue_name())
+ response_queue.set_bucket_refill_per_second(
+ queue.bucket_refill_per_second())
+ response_queue.set_bucket_capacity(queue.bucket_capacity())
+ response_queue.set_user_specified_rate(queue.user_specified_rate())
+
+ def _Dynamic_FetchQueueStats(self, request, response):
+ """Local 'random' implementation of the TaskQueueService.FetchQueueStats.
+
+ This implementation just populates the stats with random numbers.
+ Must adhere to the '_Dynamic_' naming convention for stubbing to work.
+ See taskqueue_service.proto for a full description of the RPC.
+
+ Args:
+ request: A taskqueue_service_pb.TaskQueueFetchQueueStatsRequest.
+ response: A taskqueue_service_pb.TaskQueueFetchQueueStatsResponse.
+ """
+ for _ in request.queue_name_list():
+ stats = response.add_queuestats()
+ stats.set_num_tasks(random.randint(0, request.max_num_tasks()))
+ if stats.num_tasks() == 0:
+ stats.set_oldest_eta_usec(-1)
+ else:
+ now = datetime.datetime.utcnow()
+ now_sec = time.mktime(now.timetuple())
+ stats.set_oldest_eta_usec(now_sec * 1e6 + random.randint(-1e6, 1e6))
+
+ if random.randint(0, 9) > 0:
+ scanner_info = stats.mutable_scanner_info()
+ scanner_info.set_executed_last_minute(random.randint(0, 10))
+ scanner_info.set_executed_last_hour(scanner_info.executed_last_minute()
+ + random.randint(0, 100))
+ scanner_info.set_sampling_duration_seconds(random.random() * 10000.0)
+ return
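The '_Dynamic_' naming convention that the docstrings above insist on exists because the stub machinery routes each RPC to a method derived from the call name. Roughly (a sketch of the dispatch idea, not the SDK's exact code):

    def make_sync_call(stub, call, request, response):
        # A call to ('taskqueue', 'FetchQueues') resolves to the stub's
        # _Dynamic_FetchQueues method, invoked with the request and
        # response protocol buffers.
        method = getattr(stub, '_Dynamic_' + call)
        method(request, response)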
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/thirdparty/google_appengine/google/appengine/api/lib_config.py Fri Oct 23 13:54:11 2009 -0500
@@ -0,0 +1,322 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""A mechanism for library configuration.
+
+Whenever App Engine library code needs a user-configurable value, it
+should use the following protocol:
+
+1. Pick a prefix unique to the library module, e.g. 'mylib'.
+
+2. Call lib_config.register(prefix, mapping) with that prefix as
+ the first argument and a dict mapping suffixes to default functions
+ as the second.
+
+3. The register() function returns a config handle unique to this
+ prefix. The config handle object has attributes corresponding to
+ each of the suffixes given in the mapping. Call these functions
+ (they're not really methods even though they look like methods) to
+ access the user's configuration value. If the user didn't
+ configure a function, the default function from the mapping is
+ called instead.
+
+4. Document the function name and its signature and semantics.
+
+Users wanting to provide configuration values should create a module
+named appengine_config.py in the top-level directory of their
+application, and define functions as documented by various App Engine
+library components in that module. To change the configuration, edit
+the file and re-deploy the application. (When using the SDK, no
+redeployment is required: the development server will pick up the
+changes the next time it handles a request.)
+
+Third party libraries can also use this mechanism. For casual use,
+just calling the register() function with a unique prefix is okay. For
+careful libraries, however, it is recommended to instantiate a new
+LibConfigRegistry instance using a different module name.
+
+Example appengine_config.py file:
+
+ from somewhere import MyMiddleWareClass
+
+ def mylib_add_middleware(app):
+ app = MyMiddleWareClass(app)
+ return app
+
+Example library use:
+
+ from google.appengine.api import lib_config
+
+ config_handle = lib_config.register(
+ 'mylib',
+ {'add_middleware': lambda app: app})
+
+ def add_middleware(app):
+ return config_handle.add_middleware(app)
+"""
+
+
+
+import logging
+import os
+import sys
+
+
+DEFAULT_MODNAME = 'appengine_config'
+
+
+class LibConfigRegistry(object):
+ """A registry for library configuration values."""
+
+ def __init__(self, modname):
+ """Constructor.
+
+ Args:
+ modname: The module name to be imported.
+
+ Note: the actual import of this module is deferred until the first
+ time a configuration value is requested through attribute access
+ on a ConfigHandle instance.
+ """
+ self._modname = modname
+ self._registrations = {}
+ self._module = None
+
+ def register(self, prefix, mapping):
+ """Register a set of configuration names.
+
+ Args:
+ prefix: A shared prefix for the configuration names being registered.
+ If the prefix doesn't end in '_', that character is appended.
+ mapping: A dict mapping suffix strings to default values.
+
+ Returns:
+ A ConfigHandle instance.
+
+ It's okay to re-register the same prefix: the mappings are merged,
+ and for duplicate suffixes the most recent registration wins.
+ """
+ if not prefix.endswith('_'):
+ prefix += '_'
+ handle = self._registrations.get(prefix)
+ if handle is None:
+ handle = ConfigHandle(prefix, self)
+ self._registrations[prefix] = handle
+ handle._update_defaults(mapping)
+ return handle
+
+ def initialize(self):
+ """Attempt to import the config module, if not already imported.
+
+    This function always sets self._module to a value other than
+    None: either the imported module (if imported successfully), or
+    a dummy object() instance (if an ImportError was raised). Other
+ exceptions are *not* caught.
+ """
+ if self._module is not None:
+ return
+ try:
+ __import__(self._modname)
+ except ImportError, err:
+ self._module = object()
+ else:
+ self._module = sys.modules[self._modname]
+
+ def _pairs(self, prefix):
+ """Generate (key, value) pairs from the config module matching prefix.
+
+ Args:
+ prefix: A prefix string ending in '_', e.g. 'mylib_'.
+
+ Yields:
+ (key, value) pairs where key is the configuration name with
+ prefix removed, and value is the corresponding value.
+ """
+ mapping = getattr(self._module, '__dict__', None)
+ if not mapping:
+ return
+ nskip = len(prefix)
+ for key, value in mapping.iteritems():
+ if key.startswith(prefix):
+ yield key[nskip:], value
+
+ def _dump(self):
+ """Print info about all registrations to stdout."""
+ self.initialize()
+ if not hasattr(self._module, '__dict__'):
+ print 'Module %s.py does not exist.' % self._modname
+ elif not self._registrations:
+ print 'No registrations for %s.py.' % self._modname
+ else:
+ print 'Registrations in %s.py:' % self._modname
+ print '-'*40
+ for prefix in sorted(self._registrations):
+ self._registrations[prefix]._dump()
+
+
+class ConfigHandle(object):
+ """A set of configuration for a single library module or package.
+
+ Public attributes of instances of this class are configuration
+ values. Attributes are dynamically computed (in __getattr__()) and
+ cached as regular instance attributes.
+ """
+
+ _initialized = False
+
+ def __init__(self, prefix, registry):
+ """Constructor.
+
+ Args:
+ prefix: A shared prefix for the configuration names being registered.
+ It *must* end in '_'. (This is enforced by LibConfigRegistry.)
+ registry: A LibConfigRegistry instance.
+ """
+ assert prefix.endswith('_')
+ self._prefix = prefix
+ self._defaults = {}
+ self._overrides = {}
+ self._registry = registry
+
+ def _update_defaults(self, mapping):
+ """Update the default mappings.
+
+ Args:
+ mapping: A dict mapping suffix strings to default values.
+ """
+ for key, value in mapping.iteritems():
+ if key.startswith('__') and key.endswith('__'):
+ continue
+ self._defaults[key] = value
+ if self._initialized:
+ self._update_configs()
+
+ def _update_configs(self):
+ """Update the configuration values.
+
+ This clears the cached values, initializes the registry, and loads
+ the configuration values from the config module.
+ """
+ if self._initialized:
+ self._clear_cache()
+ self._registry.initialize()
+ for key, value in self._registry._pairs(self._prefix):
+ if key not in self._defaults:
+ logging.warn('Configuration "%s" not recognized', self._prefix + key)
+ else:
+ self._overrides[key] = value
+ self._initialized = True
+
+ def _clear_cache(self):
+ """Clear the cached values."""
+ for key in self._defaults:
+ try:
+ delattr(self, key)
+ except AttributeError:
+ pass
+
+ def _dump(self):
+ """Print info about this set of registrations to stdout."""
+ print 'Prefix %s:' % self._prefix
+ if self._overrides:
+ print ' Overrides:'
+ for key in sorted(self._overrides):
+ print ' %s = %r' % (key, self._overrides[key])
+ else:
+ print ' No overrides'
+ if self._defaults:
+ print ' Defaults:'
+ for key in sorted(self._defaults):
+ print ' %s = %r' % (key, self._defaults[key])
+ else:
+ print ' No defaults'
+ print '-'*40
+
+ def __getattr__(self, suffix):
+ """Dynamic attribute access.
+
+ Args:
+ suffix: The attribute name.
+
+ Returns:
+      A configuration value.
+
+ Raises:
+ AttributeError if the suffix is not a registered suffix.
+
+    The first time an attribute is referenced, this method is invoked.
+    The value returned is taken either from the config module or from
+    the registered default.
+ """
+ if not self._initialized:
+ self._update_configs()
+ if suffix in self._overrides:
+ value = self._overrides[suffix]
+ elif suffix in self._defaults:
+ value = self._defaults[suffix]
+ else:
+ raise AttributeError(suffix)
+ setattr(self, suffix, value)
+ return value
+
+
+_default_registry = LibConfigRegistry(DEFAULT_MODNAME)
+
+
+def register(prefix, mapping):
+ """Register a set of configurations with the default config module.
+
+ Args:
+ prefix: A shared prefix for the configuration names being registered.
+ If the prefix doesn't end in '_', that character is appended.
+ mapping: A dict mapping suffix strings to default values.
+
+ Returns:
+ A ConfigHandle instance.
+ """
+ return _default_registry.register(prefix, mapping)
+
+
+def main():
+ """CGI-style request handler to dump the configuration.
+
+ Put this in your app.yaml to enable (you can pick any URL):
+
+ - url: /lib_config
+ script: $PYTHON_LIB/google/appengine/api/lib_config.py
+
+  Note: unless you are using the SDK, you must be an admin.
+ """
+ if not os.getenv('SERVER_SOFTWARE', '').startswith('Dev'):
+ from google.appengine.api import users
+ if not users.is_current_user_admin():
+ if users.get_current_user() is None:
+ print 'Status: 302'
+ print 'Location:', users.create_login_url(os.getenv('PATH_INFO', ''))
+ else:
+ print 'Status: 403'
+ print
+ print 'Forbidden'
+ return
+
+ print 'Content-type: text/plain'
+ print
+ _default_registry._dump()
+
+
+if __name__ == '__main__':
+ main()
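A short sketch of the registration semantics described above; the prefix and suffix names are illustrative. Re-registering the same prefix merges the mappings, and the handle resolves overrides from appengine_config.py before falling back to these defaults:

    from google.appengine.api import lib_config

    handle = lib_config.register('mylib', {'timeout': 5, 'retries': 3})
    handle = lib_config.register('mylib', {'timeout': 10})  # merged; wins

    print handle.timeout  # 10, unless appengine_config.py sets mylib_timeout
    print handle.retries  # 3, unless appengine_config.py sets mylib_retries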
--- a/thirdparty/google_appengine/google/appengine/api/mail.py Fri Oct 23 11:17:07 2009 -0700
+++ b/thirdparty/google_appengine/google/appengine/api/mail.py Fri Oct 23 13:54:11 2009 -0500
@@ -1125,3 +1125,21 @@
yield payload_type, payload
except AttributeError:
pass
+
+ def to_mime_message(self):
+ """Convert to MIME message.
+
+ Adds additional headers from inbound email.
+
+ Returns:
+ MIME message instance of payload.
+ """
+ mime_message = super(InboundEmailMessage, self).to_mime_message()
+
+ for property, header in InboundEmailMessage.__HEADER_PROPERTIES.iteritems():
+ try:
+ mime_message[header] = getattr(self, property)
+ except AttributeError:
+ pass
+
+ return mime_message
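A hedged sketch of how the new override is typically reached: an inbound-mail handler parses the POSTed RFC 2822 text into an InboundEmailMessage and can convert it back to MIME, with the extra inbound headers carried along. The sample message text is illustrative:

    from google.appengine.api import mail

    raw_rfc822_text = (
        'From: sender@example.com\r\n'
        'To: app-id@appspotmail.com\r\n'
        'Subject: hello\r\n'
        'Content-Type: text/plain\r\n'
        '\r\n'
        'Message body.')

    message = mail.InboundEmailMessage(raw_rfc822_text)
    mime_message = message.to_mime_message()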
--- a/thirdparty/google_appengine/google/appengine/api/memcache/__init__.py Fri Oct 23 11:17:07 2009 -0700
+++ b/thirdparty/google_appengine/google/appengine/api/memcache/__init__.py Fri Oct 23 13:54:11 2009 -0500
@@ -32,6 +32,7 @@
from google.appengine.api import api_base_pb
from google.appengine.api import apiproxy_stub_map
+from google.appengine.api import capabilities
from google.appengine.api import namespace_manager
from google.appengine.api.memcache import memcache_service_pb
from google.appengine.runtime import apiproxy_errors
@@ -78,6 +79,8 @@
TYPE_LONG = 4
TYPE_BOOL = 5
+CAPABILITY = capabilities.CapabilitySet('memcache')
+
def _key_string(key, key_prefix='', server_to_user_dict=None):
"""Utility function to handle different ways of requesting keys.
--- a/thirdparty/google_appengine/google/appengine/api/user_service_pb.py Fri Oct 23 11:17:07 2009 -0700
+++ b/thirdparty/google_appengine/google/appengine/api/user_service_pb.py Fri Oct 23 13:54:11 2009 -0500
@@ -28,11 +28,17 @@
OK = 0
REDIRECT_URL_TOO_LONG = 1
NOT_ALLOWED = 2
+ OAUTH_INVALID_TOKEN = 3
+ OAUTH_INVALID_REQUEST = 4
+ OAUTH_ERROR = 5
_ErrorCode_NAMES = {
0: "OK",
1: "REDIRECT_URL_TOO_LONG",
2: "NOT_ALLOWED",
+ 3: "OAUTH_INVALID_TOKEN",
+ 4: "OAUTH_INVALID_REQUEST",
+ 5: "OAUTH_ERROR",
}
def ErrorCode_Name(cls, x): return cls._ErrorCode_NAMES.get(x, "")
@@ -487,5 +493,275 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
-__all__ = ['UserServiceError','CreateLoginURLRequest','CreateLoginURLResponse','CreateLogoutURLRequest','CreateLogoutURLResponse']
+class GetOAuthUserRequest(ProtocolBuffer.ProtocolMessage):
+
+  def __init__(self, contents=None):
+    pass
+    if contents is not None: self.MergeFromString(contents)
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+
+ def Equals(self, x):
+ if x is self: return 1
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ return n + 0
+
+ def Clear(self):
+ pass
+
+ def OutputUnchecked(self, out):
+ pass
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ }, 0)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ }, 0, ProtocolBuffer.Encoder.MAX_TYPE)
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
+class GetOAuthUserResponse(ProtocolBuffer.ProtocolMessage):
+ has_email_ = 0
+ email_ = ""
+
+ def __init__(self, contents=None):
+ if contents is not None: self.MergeFromString(contents)
+
+ def email(self): return self.email_
+
+ def set_email(self, x):
+ self.has_email_ = 1
+ self.email_ = x
+
+ def clear_email(self):
+ if self.has_email_:
+ self.has_email_ = 0
+ self.email_ = ""
+
+ def has_email(self): return self.has_email_
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+ if (x.has_email()): self.set_email(x.email())
+
+ def Equals(self, x):
+ if x is self: return 1
+ if self.has_email_ != x.has_email_: return 0
+ if self.has_email_ and self.email_ != x.email_: return 0
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ if (self.has_email_): n += 1 + self.lengthString(len(self.email_))
+ return n + 0
+
+ def Clear(self):
+ self.clear_email()
+
+ def OutputUnchecked(self, out):
+ if (self.has_email_):
+ out.putVarInt32(10)
+ out.putPrefixedString(self.email_)
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+ if tt == 10:
+ self.set_email(d.getPrefixedString())
+ continue
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ if self.has_email_: res+=prefix+("email: %s\n" % self.DebugFormatString(self.email_))
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+ kemail = 1
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ 1: "email",
+ }, 1)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ 1: ProtocolBuffer.Encoder.STRING,
+ }, 1, ProtocolBuffer.Encoder.MAX_TYPE)
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
+class CheckOAuthSignatureRequest(ProtocolBuffer.ProtocolMessage):
+
+ def __init__(self, contents=None):
+ pass
+ if contents is not None: self.MergeFromString(contents)
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+
+ def Equals(self, x):
+ if x is self: return 1
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ return n + 0
+
+ def Clear(self):
+ pass
+
+ def OutputUnchecked(self, out):
+ pass
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ }, 0)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ }, 0, ProtocolBuffer.Encoder.MAX_TYPE)
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
+class CheckOAuthSignatureResponse(ProtocolBuffer.ProtocolMessage):
+ has_oauth_consumer_key_ = 0
+ oauth_consumer_key_ = ""
+
+ def __init__(self, contents=None):
+ if contents is not None: self.MergeFromString(contents)
+
+ def oauth_consumer_key(self): return self.oauth_consumer_key_
+
+ def set_oauth_consumer_key(self, x):
+ self.has_oauth_consumer_key_ = 1
+ self.oauth_consumer_key_ = x
+
+ def clear_oauth_consumer_key(self):
+ if self.has_oauth_consumer_key_:
+ self.has_oauth_consumer_key_ = 0
+ self.oauth_consumer_key_ = ""
+
+ def has_oauth_consumer_key(self): return self.has_oauth_consumer_key_
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+ if (x.has_oauth_consumer_key()): self.set_oauth_consumer_key(x.oauth_consumer_key())
+
+ def Equals(self, x):
+ if x is self: return 1
+ if self.has_oauth_consumer_key_ != x.has_oauth_consumer_key_: return 0
+ if self.has_oauth_consumer_key_ and self.oauth_consumer_key_ != x.oauth_consumer_key_: return 0
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ if (self.has_oauth_consumer_key_): n += 1 + self.lengthString(len(self.oauth_consumer_key_))
+ return n + 0
+
+ def Clear(self):
+ self.clear_oauth_consumer_key()
+
+ def OutputUnchecked(self, out):
+ if (self.has_oauth_consumer_key_):
+ out.putVarInt32(10)
+ out.putPrefixedString(self.oauth_consumer_key_)
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+ if tt == 10:
+ self.set_oauth_consumer_key(d.getPrefixedString())
+ continue
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ if self.has_oauth_consumer_key_: res+=prefix+("oauth_consumer_key: %s\n" % self.DebugFormatString(self.oauth_consumer_key_))
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+ koauth_consumer_key = 1
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ 1: "oauth_consumer_key",
+ }, 1)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ 1: ProtocolBuffer.Encoder.STRING,
+ }, 1, ProtocolBuffer.Encoder.MAX_TYPE)
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
+
+__all__ = ['UserServiceError','CreateLoginURLRequest','CreateLoginURLResponse','CreateLogoutURLRequest','CreateLogoutURLResponse','GetOAuthUserRequest','GetOAuthUserResponse','CheckOAuthSignatureRequest','CheckOAuthSignatureResponse']
--- a/thirdparty/google_appengine/google/appengine/api/users.py Fri Oct 23 11:17:07 2009 -0700
+++ b/thirdparty/google_appengine/google/appengine/api/users.py Fri Oct 23 13:54:11 2009 -0500
@@ -151,7 +151,7 @@
(other.__email, other.__auth_domain))
-def create_login_url(dest_url):
+def create_login_url(dest_url, _auth_domain=None):
"""Computes the login URL for this request and specified destination URL.
Args:
@@ -165,6 +165,9 @@
req = user_service_pb.CreateLoginURLRequest()
resp = user_service_pb.CreateLoginURLResponse()
req.set_destination_url(dest_url)
+ if _auth_domain:
+ req.set_auth_domain(_auth_domain)
+
try:
apiproxy_stub_map.MakeSyncCall('user', 'CreateLoginURL', req, resp)
except apiproxy_errors.ApplicationError, e:
@@ -181,7 +184,7 @@
CreateLoginURL = create_login_url
-def create_logout_url(dest_url):
+def create_logout_url(dest_url, _auth_domain=None):
"""Computes the logout URL for this request and specified destination URL.
Args:
@@ -195,6 +198,9 @@
req = user_service_pb.CreateLogoutURLRequest()
resp = user_service_pb.CreateLogoutURLResponse()
req.set_destination_url(dest_url)
+ if _auth_domain:
+ req.set_auth_domain(_auth_domain)
+
try:
apiproxy_stub_map.MakeSyncCall('user', 'CreateLogoutURL', req, resp)
except apiproxy_errors.ApplicationError, e:
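Public callers are unaffected by the new parameter, since _auth_domain is underscore-prefixed and internal; usage stays as before (destination URLs are illustrative):

    from google.appengine.api import users

    login_url = users.create_login_url('/home')
    logout_url = users.create_logout_url('/goodbye')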
--- a/thirdparty/google_appengine/google/appengine/dist/_library.py Fri Oct 23 11:17:07 2009 -0700
+++ b/thirdparty/google_appengine/google/appengine/dist/_library.py Fri Oct 23 13:54:11 2009 -0500
@@ -24,7 +24,11 @@
__all__ = ['use_library']
-import distutils.version
+try:
+ import distutils.version
+except ImportError:
+ distutils = None
+
import os
import sys
--- a/thirdparty/google_appengine/google/appengine/ext/admin/templates/base.html Fri Oct 23 11:17:07 2009 -0700
+++ b/thirdparty/google_appengine/google/appengine/ext/admin/templates/base.html Fri Oct 23 13:54:11 2009 -0500
@@ -42,9 +42,7 @@
<li><a href="{{ cron_path }}">Cron Jobs</a></li>
{% endif %}
<li><a href="{{ xmpp_path }}">XMPP</a></li>
- {% comment %}
<li><a href="{{ inboundmail_path }}">Inbound Mail</a></li>
- {% endcomment %}
</ul>
</div>
--- a/thirdparty/google_appengine/google/appengine/ext/db/__init__.py Fri Oct 23 11:17:07 2009 -0700
+++ b/thirdparty/google_appengine/google/appengine/ext/db/__init__.py Fri Oct 23 13:54:11 2009 -0500
@@ -78,6 +78,7 @@
+import base64
import copy
import datetime
import logging
@@ -90,6 +91,7 @@
from google.appengine.api import datastore_errors
from google.appengine.api import datastore_types
from google.appengine.api import users
+from google.appengine.datastore import datastore_pb
Error = datastore_errors.Error
BadValueError = datastore_errors.BadValueError
@@ -124,6 +126,9 @@
ByteString = datastore_types.ByteString
BlobKey = datastore_types.BlobKey
+READ_CAPABILITY = datastore.READ_CAPABILITY
+WRITE_CAPABILITY = datastore.WRITE_CAPABILITY
+
_kind_map = {}
@@ -616,7 +621,6 @@
def __init__(self,
parent=None,
key_name=None,
- key=None,
_app=None,
_from_entity=False,
**kwds):
@@ -642,10 +646,11 @@
parent: Parent instance for this instance or None, indicating a top-
level instance.
key_name: Name for new model instance.
- key: Key instance for this instance, overrides parent and key_name
_from_entity: Intentionally undocumented.
- args: Keyword arguments mapping to properties of model.
+ kwds: Keyword arguments mapping to properties of model. Also:
+ key: Key instance for this instance, overrides parent and key_name
"""
+ key = kwds.get('key', None)
if key is not None:
if isinstance(key, (tuple, list)):
key = Key.from_path(*key)
@@ -698,6 +703,11 @@
self._key = None
self._entity = None
+ if _app is not None and isinstance(_app, Key):
+ raise BadArgumentError('_app should be a string; received Key(\'%s\'):\n'
+ ' This may be the result of passing \'key\' as '
+ 'a positional parameter in SDK 1.2.6. Please '
+ 'only pass \'key\' as a keyword parameter.' % _app)
self._app = _app
for prop in self.properties().values():
@@ -1336,7 +1346,7 @@
super(Expando, self).__init__(parent, key_name, _app, **kwds)
self._dynamic_properties = {}
for prop, value in kwds.iteritems():
- if prop not in self.properties() and value is not None:
+ if prop not in self.properties():
setattr(self, prop, value)
def __setattr__(self, key, value):
@@ -1452,16 +1462,21 @@
class _BaseQuery(object):
"""Base class for both Query and GqlQuery."""
-
- def __init__(self, model_class=None, keys_only=False):
+ _compile = False
+ def __init__(self, model_class=None, keys_only=False, compile=True,
+ cursor=None):
"""Constructor.
Args:
model_class: Model class from which entities are constructed.
keys_only: Whether the query should return full entities or only keys.
+ compile: Whether the query should also return a compiled query.
+ cursor: A compiled query from which to resume.
"""
self._model_class = model_class
self._keys_only = keys_only
+ self._compile = compile
+ self._cursor = cursor
def is_keys_only(self):
"""Returns whether this query is keys only.
@@ -1488,7 +1503,10 @@
Returns:
Iterator for this query.
"""
- iterator = self._get_query().Run()
+ self._compile = False
+ raw_query = self._get_query()
+ iterator = raw_query.Run()
+
if self._keys_only:
return iterator
else:
@@ -1529,7 +1547,10 @@
Returns:
Number of entities this query fetches.
"""
- return self._get_query().Count(limit=limit)
+ self._compile = False
+ raw_query = self._get_query()
+ result = raw_query.Count(limit=limit)
+ return result
def fetch(self, limit, offset=0):
"""Return a list of items selected using SQL-like limit and offset.
@@ -1554,7 +1575,13 @@
raise ValueError('Arguments to fetch() must be >= 0')
if limit == 0:
return []
- raw = self._get_query().Get(limit, offset)
+ raw_query = self._get_query()
+ raw = raw_query.Get(limit, offset)
+ if self._compile:
+ try:
+ self._compiled_query = raw_query.GetCompiledQuery()
+ except AssertionError, e:
+ self._compiled_query = e
if self._keys_only:
return raw
@@ -1564,6 +1591,36 @@
else:
return [class_for_kind(e.kind()).from_entity(e) for e in raw]
+ def cursor(self):
+ if not self._compile:
+ raise AssertionError('No cursor available, this action does not support '
+ 'cursors (try "fetch" instead)')
+ try:
+ if not self._compiled_query:
+ return self._compiled_query
+ if isinstance(self._compiled_query, Exception):
+ raise self._compiled_query
+ return base64.urlsafe_b64encode(self._compiled_query.Encode())
+ except AttributeError:
+ raise AssertionError('No cursor available, this query has not been '
+ 'executed')
+
+ def with_cursor(self, cursor):
+ try:
+ assert cursor, "Cursor cannot be empty"
+ cursor = datastore_pb.CompiledQuery(base64.urlsafe_b64decode(cursor))
+ assert cursor.IsInitialized()
+ except (AssertionError, TypeError), e:
+ raise datastore_errors.BadValueError(
+ 'Invalid cursor %s. Details: %s' % (cursor, e))
+ except Exception, e:
+ if e.__class__.__name__ == 'ProtocolBufferDecodeError':
+ raise datastore_errors.BadValueError('Invalid cursor %s.' % cursor)
+ else:
+ raise
+ self._cursor = cursor
+ return self
+
def __getitem__(self, arg):
"""Support for query[index] and query[start:stop].
@@ -1707,14 +1764,15 @@
print story.title
"""
- def __init__(self, model_class=None, keys_only=False):
+ def __init__(self, model_class=None, keys_only=False, cursor=None):
"""Constructs a query over instances of the given Model.
Args:
model_class: Model class to build query for.
keys_only: Whether the query should return full entities or only keys.
+ cursor: A compiled query from which to resume.
"""
- super(Query, self).__init__(model_class, keys_only)
+ super(Query, self).__init__(model_class, keys_only, cursor=cursor)
self.__query_sets = [{}]
self.__orderings = []
self.__ancestor = None
@@ -1730,7 +1788,9 @@
kind = None
query = _query_class(kind,
query_set,
- keys_only=self._keys_only)
+ keys_only=self._keys_only,
+ compile=self._compile,
+ cursor=self._cursor)
query.Order(*self.__orderings)
if self.__ancestor is not None:
query.Ancestor(self.__ancestor)
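A minimal sketch of the cursor round trip these changes enable; the Story model is illustrative, and cursor() is only meaningful after a fetch() on a compiled query, as the code above shows:

    from google.appengine.ext import db

    class Story(db.Model):
        title = db.StringProperty()

    query = Story.all()
    first_page = query.fetch(20)
    cursor = query.cursor()     # opaque URL-safe string

    # Later, e.g. in another request: resume after the previous fetch.
    next_page = Story.all().with_cursor(cursor).fetch(20)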
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/thirdparty/google_appengine/google/appengine/ext/db/stats.py Fri Oct 23 13:54:11 2009 -0500
@@ -0,0 +1,148 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""Models to be used when accessing app specific datastore usage statistics.
+
+These entities cannot be created by users, but are populated in the
+application's datastore by offline processes run by the Google App Engine team.
+"""
+
+
+
+
+from google.appengine.ext import db
+
+
+class BaseStatistic(db.Model):
+ """Base Statistic Model class.
+
+ The 'bytes' attribute represents the total number of bytes taken up in the
+ datastore for the statistic instance. The 'count' attribute is the
+ total number of occurrences of the statistic in the datastore. The
+ 'timestamp' is when the statistic instance was written to the datastore.
+ """
+ STORED_KIND_NAME = '__BaseStatistic__'
+
+ bytes = db.IntegerProperty()
+
+ count = db.IntegerProperty()
+
+ timestamp = db.DateTimeProperty()
+
+ @classmethod
+ def kind(cls):
+ """Kind name override."""
+ return cls.STORED_KIND_NAME
+
+
+class BaseKindStatistic(BaseStatistic):
+ """Base Statistic Model class for stats associated with kinds.
+
+ The 'kind_name' attribute represents the name of the kind associated with the
+ statistic instance.
+ """
+ STORED_KIND_NAME = '__BaseKindStatistic__'
+
+ kind_name = db.StringProperty()
+
+
+class GlobalStat(BaseStatistic):
+ """An aggregate of all entities across the entire application.
+
+ This statistic only has a single instance in the datastore that contains the
+ total number of entities stored and the total number of bytes they take up.
+ """
+ STORED_KIND_NAME = '__Stat_Total__'
+
+
+class KindStat(BaseKindStatistic):
+ """An aggregate of all entities at the granularity of their Kind.
+
+ There is an instance of the KindStat for every Kind that is in the
+ application's datastore. This stat contains per-Kind statistics.
+ """
+ STORED_KIND_NAME = '__Stat_Kind__'
+
+
+class KindRootEntityStat(BaseKindStatistic):
+ """Statistics of the number of root entities in the datastore by Kind.
+
+  There is an instance of the KindRootEntityStat for every Kind that is in the
+ application's datastore and has an instance that is a root entity. This stat
+ contains statistics regarding these root entity instances.
+ """
+ STORED_KIND_NAME = '__Stat_Kind_IsRootEntity__'
+
+
+class KindNonRootEntityStat(BaseKindStatistic):
+ """Statistics of the number of non root entities in the datastore by Kind.
+
+  There is an instance of the KindNonRootEntityStat for every Kind that is in
+  the application's datastore that is not a root entity. This stat contains
+  statistics regarding these non root entity instances.
+ """
+ STORED_KIND_NAME = '__Stat_Kind_NotRootEntity__'
+
+
+class PropertyTypeStat(BaseStatistic):
+ """An aggregate of all properties across the entire application by type.
+
+ There is an instance of the PropertyTypeStat for every property type
+ (google.appengine.api.datastore_types._PROPERTY_TYPES) in use by the
+ application in its datastore.
+ """
+ STORED_KIND_NAME = '__Stat_PropertyType__'
+
+ property_type = db.StringProperty()
+
+
+class KindPropertyTypeStat(BaseKindStatistic):
+ """Statistics on (kind, property_type) tuples in the app's datastore.
+
+ There is an instance of the KindPropertyTypeStat for every
+ (kind, property_type) tuple in the application's datastore.
+ """
+ STORED_KIND_NAME = '__Stat_PropertyType_Kind__'
+
+ property_type = db.StringProperty()
+
+
+class KindPropertyNameStat(BaseKindStatistic):
+ """Statistics on (kind, property_name) tuples in the app's datastore.
+
+ There is an instance of the KindPropertyNameStat for every
+  (kind, property_name) tuple in the application's datastore.
+ """
+ STORED_KIND_NAME = '__Stat_PropertyName_Kind__'
+
+ property_name = db.StringProperty()
+
+
+class KindPropertyNamePropertyTypeStat(BaseKindStatistic):
+ """Statistic on (kind, property_name, property_type) tuples in the datastore.
+
+ There is an instance of the KindPropertyNamePropertyTypeStat for every
+ (kind, property_name, property_type) tuple in the application's datastore.
+ """
+ STORED_KIND_NAME = '__Stat_PropertyType_PropertyName_Kind__'
+
+ property_type = db.StringProperty()
+
+ property_name = db.StringProperty()
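Since GlobalStat has a single instance, fetching it is a one-row query; a minimal sketch (attribute names per the models above):

    from google.appengine.ext.db import stats

    global_stat = stats.GlobalStat.all().get()
    if global_stat:
        print 'entities: %d, bytes: %d' % (global_stat.count,
                                           global_stat.bytes)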
--- a/thirdparty/google_appengine/google/appengine/ext/gql/__init__.py Fri Oct 23 11:17:07 2009 -0700
+++ b/thirdparty/google_appengine/google/appengine/ext/gql/__init__.py Fri Oct 23 13:54:11 2009 -0500
@@ -104,7 +104,7 @@
Execute('SELECT * FROM Story WHERE Author = :1 AND Date > :2')
- Named parameters
Execute('SELECT * FROM Story WHERE Author = :author AND Date > :date')
- - Literals (numbers, and strings)
+ - Literals (numbers, strings, booleans, and NULL)
Execute('SELECT * FROM Story WHERE Author = \'James\'')
Users are also given the option of doing type conversions to other datastore
@@ -343,7 +343,9 @@
def __CastUser(self, values):
"""Cast to User() class using the email address in values[0]."""
if len(values) != 1:
- self.__CastError(values, 'user', 'requires one and only one value')
+ self.__CastError('user', values, 'requires one and only one value')
+ elif values[0] is None:
+ self.__CastError('user', values, 'must be non-null')
else:
return users.User(email=values[0], _auth_domain=self.__auth_domain)
@@ -420,7 +422,8 @@
elif len(values) <= 4:
time_tuple = (1970, 1, 1) + tuple(values)
else:
- self.__CastError('TIME', values, err)
+ self.__CastError('TIME', values,
+ 'function takes 1 to 4 integers or 1 string')
try:
return datetime.datetime(*time_tuple)
@@ -929,7 +932,7 @@
filter_rule = (self.__ANCESTOR, 'is')
assert condition.lower() == 'is'
- if condition.lower() != 'in' and operator == 'list':
+ if operator == 'list' and condition.lower() != 'in':
self.__Error('Only IN can process a list of values')
self.__filters.setdefault(filter_rule, []).append((operator, parameters))
@@ -1008,6 +1011,9 @@
if literal is not None:
return Literal(literal)
+
+ if self.__Accept('NULL'):
+ return Literal(None)
else:
return None
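With NULL accepted as a literal, equality filters against None values can be written directly in GQL; a minimal sketch (the Story kind is illustrative):

    from google.appengine.ext import db

    for story in db.GqlQuery("SELECT * FROM Story WHERE Author = NULL"):
        print story.title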
--- a/thirdparty/google_appengine/google/appengine/ext/key_range/__init__.py Fri Oct 23 11:17:07 2009 -0700
+++ b/thirdparty/google_appengine/google/appengine/ext/key_range/__init__.py Fri Oct 23 13:54:11 2009 -0500
@@ -172,7 +172,7 @@
assert isinstance(query, datastore.Query)
if self.key_start == self.key_end and not (
self.include_start or self.include_end):
- return EmptyDatastoreQuery(query.kind)
+ return EmptyDatastoreQuery(query._ToPb().kind())
if self.include_start:
start_comparator = '>='
else:
--- a/thirdparty/google_appengine/google/appengine/ext/remote_api/handler.py Fri Oct 23 11:17:07 2009 -0700
+++ b/thirdparty/google_appengine/google/appengine/ext/remote_api/handler.py Fri Oct 23 13:54:11 2009 -0500
@@ -42,19 +42,26 @@
import os
import pickle
import sha
+import sys
import wsgiref.handlers
import yaml
from google.appengine.api import api_base_pb
from google.appengine.api import apiproxy_stub
from google.appengine.api import apiproxy_stub_map
-from google.appengine.api import datastore_errors
from google.appengine.api import mail_service_pb
from google.appengine.api import urlfetch_service_pb
from google.appengine.api import users
from google.appengine.api.capabilities import capability_service_pb
from google.appengine.api.images import images_service_pb
from google.appengine.api.memcache import memcache_service_pb
+try:
+ __import__('google.appengine.api.labs.taskqueue.taskqueue_service_pb')
+ taskqueue_service_pb = sys.modules.get(
+ 'google.appengine.api.labs.taskqueue.taskqueue_service_pb')
+except ImportError:
+ from google.appengine.api.taskqueue import taskqueue_service_pb
+from google.appengine.api.xmpp import xmpp_service_pb
from google.appengine.datastore import datastore_pb
from google.appengine.ext import webapp
from google.appengine.ext.remote_api import remote_api_pb
@@ -73,6 +80,19 @@
RunQuery that immediately returns the query results.
"""
+ def __init__(self, service='datastore_v3', _test_stub_map=None):
+ """Constructor.
+
+ Args:
+ service: The name of the service
+ _test_stub_map: An APIProxyStubMap to use for testing purposes.
+ """
+ super(RemoteDatastoreStub, self).__init__(service)
+ if _test_stub_map:
+ self.__call = _test_stub_map.MakeSyncCall
+ else:
+ self.__call = apiproxy_stub_map.MakeSyncCall
+
def _Dynamic_RunQuery(self, request, response):
"""Handle a RunQuery request.
@@ -80,8 +100,7 @@
of the Next request.
"""
runquery_response = datastore_pb.QueryResult()
- apiproxy_stub_map.MakeSyncCall('datastore_v3', 'RunQuery',
- request, runquery_response)
+ self.__call('datastore_v3', 'RunQuery', request, runquery_response)
if runquery_response.result_size() > 0:
response.CopyFrom(runquery_response)
return
@@ -89,8 +108,7 @@
next_request = datastore_pb.NextRequest()
next_request.mutable_cursor().CopyFrom(runquery_response.cursor())
next_request.set_count(request.limit())
- apiproxy_stub_map.MakeSyncCall('datastore_v3', 'Next',
- next_request, response)
+ self.__call('datastore_v3', 'Next', next_request, response)
def _Dynamic_Transaction(self, request, response):
"""Handle a Transaction request.
@@ -102,8 +120,7 @@
transaction of its own to make the updates.
"""
tx = datastore_pb.Transaction()
- apiproxy_stub_map.MakeSyncCall('datastore_v3', 'BeginTransaction',
- api_base_pb.VoidProto(), tx)
+ self.__call('datastore_v3', 'BeginTransaction', api_base_pb.VoidProto(), tx)
preconditions = request.precondition_list()
if preconditions:
@@ -113,8 +130,7 @@
key = get_request.add_key()
key.CopyFrom(precondition.key())
get_response = datastore_pb.GetResponse()
- apiproxy_stub_map.MakeSyncCall('datastore_v3', 'Get', get_request,
- get_response)
+ self.__call('datastore_v3', 'Get', get_request, get_response)
entities = get_response.entity_list()
assert len(entities) == request.precondition_size()
for precondition, entity in zip(preconditions, entities):
@@ -132,17 +148,15 @@
if request.has_puts():
put_request = request.puts()
put_request.mutable_transaction().CopyFrom(tx)
- apiproxy_stub_map.MakeSyncCall('datastore_v3', 'Put',
- put_request, response)
+ self.__call('datastore_v3', 'Put', put_request, response)
if request.has_deletes():
delete_request = request.deletes()
delete_request.mutable_transaction().CopyFrom(tx)
- apiproxy_stub_map.MakeSyncCall('datastore_v3', 'Delete',
- delete_request, api_base_pb.VoidProto())
+ self.__call('datastore_v3', 'Delete', delete_request,
+ api_base_pb.VoidProto())
- apiproxy_stub_map.MakeSyncCall('datastore_v3', 'Commit', tx,
- api_base_pb.VoidProto())
+ self.__call('datastore_v3', 'Commit', tx, api_base_pb.VoidProto())
def _Dynamic_GetIDs(self, request, response):
"""Fetch unique IDs for a set of paths."""
@@ -154,13 +168,11 @@
assert lastpart.id() == 0 and not lastpart.has_name()
tx = datastore_pb.Transaction()
- apiproxy_stub_map.MakeSyncCall('datastore_v3', 'BeginTransaction',
- api_base_pb.VoidProto(), tx)
+ self.__call('datastore_v3', 'BeginTransaction', api_base_pb.VoidProto(), tx)
- apiproxy_stub_map.MakeSyncCall('datastore_v3', 'Put', request, response)
+ self.__call('datastore_v3', 'Put', request, response)
- apiproxy_stub_map.MakeSyncCall('datastore_v3', 'Rollback', tx,
- api_base_pb.VoidProto())
+ self.__call('datastore_v3', 'Rollback', tx, api_base_pb.VoidProto())
SERVICE_PB_MAP = {
@@ -174,6 +186,8 @@
'Delete': (datastore_pb.DeleteRequest, datastore_pb.DeleteResponse),
'Count': (datastore_pb.Query, api_base_pb.Integer64Proto),
'GetIndices': (api_base_pb.StringProto, datastore_pb.CompositeIndices),
+ 'AllocateIds':(datastore_pb.AllocateIdsRequest,
+ datastore_pb.AllocateIdsResponse),
},
'images': {
'Transform': (images_service_pb.ImagesTransformRequest,
@@ -201,6 +215,17 @@
'Stats': (memcache_service_pb.MemcacheStatsRequest,
memcache_service_pb.MemcacheStatsResponse),
},
+ 'taskqueue': {
+ 'Add': (taskqueue_service_pb.TaskQueueAddRequest,
+ taskqueue_service_pb.TaskQueueAddResponse),
+ 'UpdateQueue':(taskqueue_service_pb.TaskQueueUpdateQueueRequest,
+ taskqueue_service_pb.TaskQueueUpdateQueueResponse),
+ 'FetchQueues':(taskqueue_service_pb.TaskQueueFetchQueuesRequest,
+ taskqueue_service_pb.TaskQueueFetchQueuesResponse),
+ 'FetchQueueStats':(
+ taskqueue_service_pb.TaskQueueFetchQueueStatsRequest,
+ taskqueue_service_pb.TaskQueueFetchQueueStatsResponse),
+ },
'remote_datastore': {
'RunQuery': (datastore_pb.Query, datastore_pb.QueryResult),
'Transaction': (remote_api_pb.TransactionRequest,
@@ -211,6 +236,14 @@
'Fetch': (urlfetch_service_pb.URLFetchRequest,
urlfetch_service_pb.URLFetchResponse),
},
+ 'xmpp': {
+ 'GetPresence': (xmpp_service_pb.PresenceRequest,
+ xmpp_service_pb.PresenceResponse),
+ 'SendMessage': (xmpp_service_pb.XmppMessageRequest,
+ xmpp_service_pb.XmppMessageResponse),
+ 'SendInvite': (xmpp_service_pb.XmppInviteRequest,
+ xmpp_service_pb.XmppInviteResponse),
+ },
}
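
SERVICE_PB_MAP names, for each service and method, the request and response protocol buffer classes the handler should instantiate. A sketch of the lookup, assuming it runs inside this module (service and method chosen arbitrarily):

  request_class, response_class = SERVICE_PB_MAP['taskqueue']['Add']
  request = request_class()    # a TaskQueueAddRequest
  response = response_class()  # a TaskQueueAddResponse
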
@@ -267,10 +300,10 @@
logging.exception('Exception while handling %s', request)
self.response.set_status(200)
response.mutable_exception().set_contents(pickle.dumps(e))
- if isinstance(e, datastore_errors.Error):
+ if isinstance(e, apiproxy_errors.ApplicationError):
application_error = response.mutable_application_error()
- application_error.setCode(e.application_error)
- application_error.setDetail(e.error_detail)
+ application_error.set_code(e.application_error)
+ application_error.set_detail(e.error_detail)
self.response.out.write(response.Encode())
def ExecuteRequest(self, request):
--- a/thirdparty/google_appengine/google/appengine/ext/remote_api/remote_api_stub.py Fri Oct 23 11:17:07 2009 -0700
+++ b/thirdparty/google_appengine/google/appengine/ext/remote_api/remote_api_stub.py Fri Oct 23 13:54:11 2009 -0500
@@ -131,7 +131,7 @@
You can use this to stub out any service that the remote server supports.
"""
- def __init__(self, server, path):
+ def __init__(self, server, path, _test_stub_map=None):
"""Constructs a new RemoteStub that communicates with the specified server.
Args:
@@ -141,6 +141,7 @@
"""
self._server = server
self._path = path
+ self._test_stub_map = _test_stub_map
def _PreHookHandler(self, service, call, request, response):
pass
@@ -150,6 +151,16 @@
def MakeSyncCall(self, service, call, request, response):
self._PreHookHandler(service, call, request, response)
+ try:
+ test_stub = self._test_stub_map and self._test_stub_map.GetStub(service)
+ if test_stub:
+ test_stub.MakeSyncCall(service, call, request, response)
+ else:
+ self._MakeRealSyncCall(service, call, request, response)
+ finally:
+ self._PostHookHandler(service, call, request, response)
+
+ def _MakeRealSyncCall(self, service, call, request, response):
request_pb = remote_api_pb.Request()
request_pb.set_service_name(service)
request_pb.set_method(call)
@@ -160,20 +171,17 @@
encoded_response = self._server.Send(self._path, encoded_request)
response_pb.ParseFromString(encoded_response)
- try:
- if response_pb.has_application_error():
- error_pb = response_pb.application_error()
- raise datastore._ToDatastoreError(
- apiproxy_errors.ApplicationError(error_pb.code(), error_pb.detail()))
- elif response_pb.has_exception():
- raise pickle.loads(response_pb.exception().contents())
- elif response_pb.has_java_exception():
- raise UnknownJavaServerError("An unknown error has occurred in the "
- "Java remote_api handler for this call.")
- else:
- response.ParseFromString(response_pb.response().contents())
- finally:
- self._PostHookHandler(service, call, request, response)
+ if response_pb.has_application_error():
+ error_pb = response_pb.application_error()
+ raise apiproxy_errors.ApplicationError(error_pb.code(),
+ error_pb.detail())
+ elif response_pb.has_exception():
+ raise pickle.loads(response_pb.exception().contents())
+ elif response_pb.has_java_exception():
+ raise UnknownJavaServerError("An unknown error has occurred in the "
+ "Java remote_api handler for this call.")
+ else:
+ response.ParseFromString(response_pb.response().contents())
def CreateRPC(self):
return apiproxy_rpc.RPC(stub=self)
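
With this change a server-side ApplicationError reaches the client with its original code and detail instead of being converted to a datastore error. A sketch of what a caller can now catch (the key is hypothetical):

  from google.appengine.api import datastore
  from google.appengine.runtime import apiproxy_errors

  try:
    datastore.Get(some_key)  # some_key: any datastore.Key, for illustration
  except apiproxy_errors.ApplicationError, e:
    print e.application_error, e.error_detail
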
@@ -187,8 +195,19 @@
Transactions on the remote datastore are unfortunately still impossible.
"""
- def __init__(self, server, path):
- super(RemoteDatastoreStub, self).__init__(server, path)
+ def __init__(self, server, path, default_result_count=20,
+ _test_stub_map=None):
+ """Constructor.
+
+ Args:
+ server: The server name to connect to.
+ path: The URI path on the server.
+ default_result_count: The number of items to fetch, by default, in a
+ datastore Query or Next operation. This affects the batch size of
+        query iterators.
+      _test_stub_map: An APIProxyStubMap to use for testing purposes.
+ """
+ super(RemoteDatastoreStub, self).__init__(server, path, _test_stub_map)
+ self.default_result_count = default_result_count
self.__queries = {}
self.__transactions = {}
@@ -237,13 +256,14 @@
query_result.set_more_results(False)
return
+ if next_request.has_count():
+ result_count = next_request.count()
+ else:
+ result_count = self.default_result_count
+
request = datastore_pb.Query()
request.CopyFrom(query)
- if request.has_limit():
- request.set_limit(min(request.limit(), next_request.count()))
- else:
- request.set_limit(next_request.count())
- request.set_count(request.limit())
+ request.set_count(result_count)
super(RemoteDatastoreStub, self).MakeSyncCall(
'remote_datastore', 'RunQuery', request, query_result)
@@ -416,13 +436,27 @@
'The remote datastore does not support index manipulation.')
+ALL_SERVICES = set([
+ 'capability_service',
+ 'datastore_v3',
+ 'images',
+ 'mail',
+ 'memcache',
+ 'taskqueue',
+ 'urlfetch',
+ 'xmpp',
+])
+
+
def ConfigureRemoteApi(app_id,
path,
auth_func,
servername=None,
rpc_server_factory=appengine_rpc.HttpRpcServer,
rtok=None,
- secure=False):
+ secure=False,
+ services=None,
+ default_auth_domain=None):
"""Does necessary setup to allow easy remote access to App Engine APIs.
Either servername must be provided or app_id must not be None. If app_id
@@ -443,6 +477,9 @@
rtok: The validation token to send with app_id lookups. If None, a random
token is used.
secure: Use SSL when communicating with the server.
+ services: A list of services to set up stubs for. If specified, only those
+ services are configured; by default all supported services are configured.
+ default_auth_domain: The authentication domain to use by default.
Raises:
urllib2.HTTPError: if app_id is not provided and there is an error while
@@ -473,13 +510,27 @@
repr(app_info['rtok'])))
app_id = app_info['app_id']
+ if services is not None:
+ services = set(services)
+ unsupported = services.difference(ALL_SERVICES)
+ if unsupported:
+ raise ConfigurationError('Unsupported service(s): %s'
+ % (', '.join(unsupported),))
+ else:
+ services = set(ALL_SERVICES)
+
os.environ['APPLICATION_ID'] = app_id
+ if default_auth_domain:
+ os.environ['AUTH_DOMAIN'] = default_auth_domain
+ elif 'AUTH_DOMAIN' not in os.environ:
+ os.environ['AUTH_DOMAIN'] = 'gmail.com'
apiproxy_stub_map.apiproxy = apiproxy_stub_map.APIProxyStubMap()
- datastore_stub = RemoteDatastoreStub(server, path)
- apiproxy_stub_map.apiproxy.RegisterStub('datastore_v3', datastore_stub)
+ if 'datastore_v3' in services:
+ services.remove('datastore_v3')
+ datastore_stub = RemoteDatastoreStub(server, path)
+ apiproxy_stub_map.apiproxy.RegisterStub('datastore_v3', datastore_stub)
stub = RemoteStub(server, path)
- for service in ['capability_service', 'images', 'mail', 'memcache',
- 'urlfetch']:
+ for service in services:
apiproxy_stub_map.apiproxy.RegisterStub(service, stub)
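
Taken together, the new arguments let callers install stubs for a subset of services and pin the auth domain. A usage sketch (app id, path, and credentials are hypothetical):

  from google.appengine.ext.remote_api import remote_api_stub

  def auth_func():
    return ('user@example.com', 'password')

  remote_api_stub.ConfigureRemoteApi(
      'my-app', '/remote_api', auth_func,
      services=['datastore_v3', 'memcache'],
      default_auth_domain='example.com')
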
--- a/thirdparty/google_appengine/google/appengine/ext/webapp/util.py Fri Oct 23 11:17:07 2009 -0700
+++ b/thirdparty/google_appengine/google/appengine/ext/webapp/util.py Fri Oct 23 13:54:11 2009 -0500
@@ -21,13 +21,18 @@
-__all__ = ["login_required", "run_wsgi_app"]
+__all__ = ['login_required',
+ 'run_wsgi_app',
+ 'add_wsgi_middleware',
+ 'run_bare_wsgi_app',
+ ]
import os
import sys
import wsgiref.util
from google.appengine.api import users
+from google.appengine.api import lib_config
from google.appengine.ext import webapp
@@ -58,13 +63,47 @@
return check_login
+_config_handle = lib_config.register(
+ 'webapp',
+ {'add_wsgi_middleware': lambda app: app})
+
+
def run_wsgi_app(application):
"""Runs your WSGI-compliant application object in a CGI environment.
Compared to wsgiref.handlers.CGIHandler().run(application), this
function takes some shortcuts. Those are possible because the
app server makes stronger promises than the CGI standard.
+
+ Also, this function may wrap custom WSGI middleware around the
+ application. (You can use run_bare_wsgi_app() to run an application
+ without adding WSGI middleware, and add_wsgi_middleware() to wrap
+ the configured WSGI middleware around an application without running
+ it. This function is merely a convenient combination of the latter
+ two.)
+
+  To configure custom WSGI middleware, define a function
+  webapp_add_wsgi_middleware(app) in the appengine_config.py file in
+  your application root directory:
+
+ def webapp_add_wsgi_middleware(app):
+ app = MiddleWareClassOne(app)
+ app = MiddleWareClassTwo(app)
+ return app
+
+ You must import the middleware classes elsewhere in the file. If
+ the function is not found, no WSGI middleware is added.
"""
+ run_bare_wsgi_app(add_wsgi_middleware(application))
+
+
+def add_wsgi_middleware(application):
+ """Wrap WSGI middleware around a WSGI application object."""
+ return _config_handle.add_wsgi_middleware(application)
+
+
+def run_bare_wsgi_app(application):
+ """Like run_wsgi_app() but doesn't add WSGI middleware."""
env = dict(os.environ)
env["wsgi.input"] = sys.stdin
env["wsgi.errors"] = sys.stderr
--- a/thirdparty/google_appengine/google/appengine/tools/appcfg.py Fri Oct 23 11:17:07 2009 -0700
+++ b/thirdparty/google_appengine/google/appengine/tools/appcfg.py Fri Oct 23 13:54:11 2009 -0500
@@ -1693,8 +1693,9 @@
default='appengine.google.com',
metavar='SERVER', help='The server to connect to.')
parser.add_option('--secure', action='store_true', dest='secure',
- default=False,
- help='Use SSL when communicating with the server.')
+ default=True, help=optparse.SUPPRESS_HELP)
+ parser.add_option('--insecure', action='store_false', dest='secure',
+ help='Use HTTP when communicating with the server.')
parser.add_option('-e', '--email', action='store', dest='email',
metavar='EMAIL', default=None,
help='The username to use. Will prompt if omitted.')
@@ -1707,6 +1708,10 @@
parser.add_option('--passin', action='store_true',
dest='passin', default=False,
help='Read the login password from stdin.')
+ parser.add_option('-A', '--application', action='store', dest='app_id',
+ help='Override application from app.yaml file.')
+ parser.add_option('-V', '--version', action='store', dest='version',
+ help='Override (major) version from app.yaml file.')
return parser
def _MakeSpecificParser(self, action):
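
Example invocations under the new defaults and flags (application ids and versions are hypothetical):

  appcfg.py update myapp/                      # now uses SSL by default
  appcfg.py --insecure update myapp/           # the old HTTP behavior
  appcfg.py -A other-app -V 2 update myapp/    # override app.yaml values
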
@@ -1757,6 +1762,8 @@
return (email, password)
+ StatusUpdate('Server: %s.' % self.options.server)
+
if self.options.host and self.options.host == 'localhost':
email = self.options.email
if email is None:
@@ -1768,7 +1775,9 @@
GetUserAgent(),
GetSourceName(),
host_override=self.options.host,
- save_cookies=self.options.save_cookies)
+ save_cookies=self.options.save_cookies,
+
+ secure=False)
server.authenticated = True
return server
@@ -1824,6 +1833,20 @@
appyaml = appinfo.LoadSingleAppInfo(fh)
finally:
fh.close()
+ orig_application = appyaml.application
+ orig_version = appyaml.version
+ if self.options.app_id:
+ appyaml.application = self.options.app_id
+ if self.options.version:
+ appyaml.version = self.options.version
+ msg = 'Application: %s' % appyaml.application
+ if appyaml.application != orig_application:
+ msg += ' (was: %s)' % orig_application
+ msg += '; version: %s' % appyaml.version
+ if appyaml.version != orig_version:
+ msg += ' (was: %s)' % orig_version
+ msg += '.'
+ StatusUpdate(msg)
return appyaml
def _ParseYamlFile(self, basepath, basename, parser):
--- a/thirdparty/google_appengine/google/appengine/tools/appengine_rpc.py Fri Oct 23 11:17:07 2009 -0700
+++ b/thirdparty/google_appengine/google/appengine/tools/appengine_rpc.py Fri Oct 23 13:54:11 2009 -0500
@@ -107,7 +107,7 @@
def __init__(self, host, auth_function, user_agent, source,
host_override=None, extra_headers=None, save_cookies=False,
- auth_tries=3, account_type=None, debug_data=True, secure=False):
+ auth_tries=3, account_type=None, debug_data=True, secure=True):
"""Creates a new HttpRpcServer.
Args:
--- a/thirdparty/google_appengine/google/appengine/tools/bulkloader.py Fri Oct 23 11:17:07 2009 -0700
+++ b/thirdparty/google_appengine/google/appengine/tools/bulkloader.py Fri Oct 23 13:54:11 2009 -0500
@@ -395,10 +395,6 @@
self.row_count += 1
if self.column_count is None:
self.column_count = len(row)
- else:
- if self.column_count != len(row):
- raise ResumeError('Column count mismatch, %d: %s' %
- (self.column_count, str(row)))
self.read_rows.append((self.line_number, row))
self.line_number += 1
@@ -1186,6 +1182,20 @@
self.auth_called = True
return (email, password)
+ def IncrementId(self, ancestor_path, kind, high_id):
+ """Increment the unique id counter associated with ancestor_path and kind.
+
+ Args:
+ ancestor_path: A list encoding the path of a key.
+ kind: The string name of a kind.
+      high_id: The int value up to which the unique id counter should be raised.
+ """
+ model_key = datastore.Key.from_path(*(ancestor_path + [kind, 1]))
+ start, end = datastore.AllocateIds(model_key, 1)
+ if end < high_id:
+ start, end = datastore.AllocateIds(model_key, high_id - end)
+ assert end >= high_id
+
def EncodeContent(self, rows, loader=None):
"""Encodes row data to the wire format.
@@ -2368,6 +2378,14 @@
"""
Loader.__loaders[loader.kind] = loader
+ def get_high_ids(self):
+ """Returns dict {ancestor_path : {kind : id}} with high id values.
+
+    The returned dictionary is used to raise the id counter associated
+    with each (ancestor_path, kind) pair to at least the given id.
+ """
+ return {}
+
def alias_old_names(self):
"""Aliases method names so that Loaders defined with old names work."""
aliases = (
@@ -2546,21 +2564,54 @@
cursor = db_conn.cursor()
cursor.execute('select id, value from result')
for entity_id, value in cursor:
- self.queue.put([entity_id, value], block=True)
+ self.queue.put(value, block=True)
self.queue.put(RestoreThread._ENTITIES_DONE, block=True)
class RestoreLoader(Loader):
"""A Loader which imports protobuffers from a file."""
- def __init__(self, kind):
+ def __init__(self, kind, app_id):
self.kind = kind
+ self.app_id = app_id
def initialize(self, filename, loader_opts):
CheckFile(filename)
self.queue = Queue.Queue(1000)
restore_thread = RestoreThread(self.queue, filename)
restore_thread.start()
+ self.high_id_table = self._find_high_id(self.generate_records(filename))
+ restore_thread = RestoreThread(self.queue, filename)
+ restore_thread.start()
+
+ def get_high_ids(self):
+ return dict(self.high_id_table)
+
+ def _find_high_id(self, record_generator):
+ """Find the highest numeric id used for each ancestor-path, kind pair.
+
+ Args:
+      record_generator: A generator of encoded entity strings.
+
+ Returns:
+ A map from ancestor-path to maps from kind to id. {path : {kind : id}}
+ """
+ high_id = {}
+ for values in record_generator:
+ entity = self.create_entity(values)
+ key = entity.key()
+ if not key.id():
+ continue
+ kind = key.kind()
+ ancestor_path = []
+ if key.parent():
+ ancestor_path = key.parent().to_path()
+ if tuple(ancestor_path) not in high_id:
+ high_id[tuple(ancestor_path)] = {}
+ kind_map = high_id[tuple(ancestor_path)]
+ if kind not in kind_map or kind_map[kind] < key.id():
+ kind_map[kind] = key.id()
+ return high_id
def generate_records(self, filename):
while True:
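
The table _find_high_id returns is keyed by ancestor-path tuples, an empty tuple meaning no parent; an illustrative value (kinds and ids are hypothetical):

  {(): {'Person': 4821},
   ('Group', 42): {'Person': 97}}
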
@@ -2570,10 +2621,33 @@
yield record
def create_entity(self, values, key_name=None, parent=None):
- key = StrKey(unicode(values[0], 'utf-8'))
- entity_proto = entity_pb.EntityProto(contents=str(values[1]))
- entity_proto.mutable_key().CopyFrom(key._Key__reference)
- return datastore.Entity._FromPb(entity_proto)
+ entity_proto = entity_pb.EntityProto(contents=str(values))
+ fixed_entity_proto = self._translate_entity_proto(entity_proto)
+ return datastore.Entity._FromPb(fixed_entity_proto)
+
+ def rewrite_reference_proto(self, reference_proto):
+ """Transform the Reference protobuffer which underlies keys and references.
+
+ Args:
+ reference_proto: A Onestore Reference proto
+ """
+ reference_proto.set_app(self.app_id)
+
+ def _translate_entity_proto(self, entity_proto):
+ """Transform the ReferenceProperties of the given entity to fix app_id."""
+ entity_key = entity_proto.mutable_key()
+ entity_key.set_app(self.app_id)
+ for prop in entity_proto.property_list():
+ prop_value = prop.mutable_value()
+ if prop_value.has_referencevalue():
+ self.rewrite_reference_proto(prop_value.mutable_referencevalue())
+
+ for prop in entity_proto.raw_property_list():
+ prop_value = prop.mutable_value()
+ if prop_value.has_referencevalue():
+ self.rewrite_reference_proto(prop_value.mutable_referencevalue())
+
+ return entity_proto
class Exporter(object):
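
The rewrite covers both the entity's own key and every reference-valued property (including raw properties), so a dump taken from one application can be restored into another. A sketch of the effect (names are hypothetical; encoded_record is an encoded entity string read from the dump):

  loader = RestoreLoader('Person', 'target-app')
  entity = loader.create_entity(encoded_record)
  # entity.key().app() == 'target-app'; reference properties now agree.
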
@@ -2662,7 +2736,7 @@
for name, fn, default in self.__properties:
try:
encoding.append(fn(entity[name]))
- except AttributeError:
+ except KeyError:
if default is None:
raise MissingPropertyError(name)
else:
@@ -2954,6 +3028,10 @@
unused_query, unused_fragment) = urlparse.urlsplit(self.post_url)
self.secure = (scheme == 'https')
+ def RunPostAuthentication(self):
+ """Method that gets called after authentication."""
+ pass
+
def Run(self):
"""Perform the work of the BulkTransporterApp.
@@ -2971,29 +3049,31 @@
threading.currentThread().exit_flag = False
progress_queue = self.progress_queue_factory(self.max_queue_size)
- request_manager = self.request_manager_factory(self.app_id,
- self.host_port,
- self.url_path,
- self.kind,
- self.throttle,
- self.batch_size,
- self.secure,
- self.email,
- self.passin,
- self.dry_run)
+ self.request_manager = self.request_manager_factory(self.app_id,
+ self.host_port,
+ self.url_path,
+ self.kind,
+ self.throttle,
+ self.batch_size,
+ self.secure,
+ self.email,
+ self.passin,
+ self.dry_run)
try:
- request_manager.Authenticate()
+ self.request_manager.Authenticate()
except Exception, e:
self.error = True
if not isinstance(e, urllib2.HTTPError) or (
e.code != 302 and e.code != 401):
logger.exception('Exception during authentication')
raise AuthenticationError()
- if (request_manager.auth_called and
- not request_manager.authenticated):
+ if (self.request_manager.auth_called and
+ not self.request_manager.authenticated):
self.error = True
raise AuthenticationError('Authentication failed')
+ self.RunPostAuthentication()
+
for thread in thread_pool.Threads():
self.throttle.Register(thread)
@@ -3007,7 +3087,7 @@
progress_generator_factory = None
self.data_source_thread = (
- self.datasourcethread_factory(request_manager,
+ self.datasourcethread_factory(self.request_manager,
thread_pool,
progress_queue,
self.input_generator_factory,
@@ -3092,6 +3172,13 @@
def __init__(self, *args, **kwargs):
BulkTransporterApp.__init__(self, *args, **kwargs)
+ def RunPostAuthentication(self):
+ loader = Loader.RegisteredLoader(self.kind)
+ high_id_table = loader.get_high_ids()
+ for ancestor_path, kind_map in high_id_table.iteritems():
+ for kind, high_id in kind_map.iteritems():
+ self.request_manager.IncrementId(list(ancestor_path), kind, high_id)
+
def ReportStatus(self):
"""Display a message reporting the final status of the transfer."""
total_up, duration = self.throttle.TotalTransferred(
@@ -3625,7 +3712,7 @@
if dump:
Exporter.RegisterExporter(DumpExporter(kind, result_db_filename))
elif restore:
- Loader.RegisterLoader(RestoreLoader(kind))
+ Loader.RegisterLoader(RestoreLoader(kind, app_id))
else:
LoadConfig(config_file)
--- a/thirdparty/google_appengine/google/appengine/tools/dev_appserver.py Fri Oct 23 11:17:07 2009 -0700
+++ b/thirdparty/google_appengine/google/appengine/tools/dev_appserver.py Fri Oct 23 13:54:11 2009 -0500
@@ -1202,6 +1202,7 @@
print >>sys.stderr, indent + (message % args)
_WHITE_LIST_C_MODULES = [
+ 'py_streamhtmlparser',
'AES',
'ARC2',
'ARC4',
@@ -2790,6 +2791,9 @@
self._modules.clear()
self._modules.update(self._default_modules)
sys.path_hooks[:] = self._save_path_hooks
+ apiproxy_stub_map.apiproxy.GetPreCallHooks().Clear()
+ apiproxy_stub_map.apiproxy.GetPostCallHooks().Clear()
+
@@ -3270,8 +3274,8 @@
contain the app.yaml, indexes.yaml, and queues.yaml files.
login_url: Relative URL which should be used for handling user login/logout.
datastore_path: Path to the file to store Datastore file stub data in.
- history_path: Path to the file to store Datastore history in.
- clear_datastore: If the datastore and history should be cleared on startup.
+ history_path: DEPRECATED, No-op.
+ clear_datastore: If the datastore should be cleared on startup.
smtp_host: SMTP host used for sending test mail.
smtp_port: SMTP port.
smtp_user: SMTP user.
@@ -3286,7 +3290,6 @@
root_path = config.get('root_path', None)
login_url = config['login_url']
datastore_path = config['datastore_path']
- history_path = config['history_path']
clear_datastore = config['clear_datastore']
require_indexes = config.get('require_indexes', False)
smtp_host = config.get('smtp_host', None)
@@ -3301,18 +3304,18 @@
os.environ['APPLICATION_ID'] = app_id
if clear_datastore:
- for path in (datastore_path, history_path):
- if os.path.lexists(path):
- logging.info('Attempting to remove file at %s', path)
- try:
- remove(path)
- except OSError, e:
- logging.warning('Removing file failed: %s', e)
+ path = datastore_path
+ if os.path.lexists(path):
+ logging.info('Attempting to remove file at %s', path)
+ try:
+ remove(path)
+ except OSError, e:
+ logging.warning('Removing file failed: %s', e)
apiproxy_stub_map.apiproxy = apiproxy_stub_map.APIProxyStubMap()
datastore = datastore_file_stub.DatastoreFileStub(
- app_id, datastore_path, history_path, require_indexes=require_indexes,
+ app_id, datastore_path, require_indexes=require_indexes,
trusted=trusted)
apiproxy_stub_map.apiproxy.RegisterStub('datastore_v3', datastore)
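
After this change the file stub is constructed without a history path; a minimal registration sketch (app id and path are hypothetical):

  from google.appengine.api import apiproxy_stub_map
  from google.appengine.api import datastore_file_stub

  stub = datastore_file_stub.DatastoreFileStub(
      'my-app', '/tmp/dev_datastore', require_indexes=False, trusted=False)
  apiproxy_stub_map.apiproxy.RegisterStub('datastore_v3', stub)
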
--- a/thirdparty/google_appengine/google/appengine/tools/dev_appserver_index.py Fri Oct 23 11:17:07 2009 -0700
+++ b/thirdparty/google_appengine/google/appengine/tools/dev_appserver_index.py Fri Oct 23 13:54:11 2009 -0500
@@ -76,13 +76,6 @@
res = []
for (kind, ancestor, props), count in sorted(indexes.iteritems()):
res.append('')
- if count == 0:
- message = '# Unused in query history -- copied from input.'
- elif count == 1:
- message = '# Used once in query history.'
- else:
- message = '# Used %d times in query history.' % count
- res.append(message)
res.append(datastore_index.IndexYamlForQuery(kind, ancestor, props))
res.append('')