Merge with Sverre's accidental head.
authorPawel Solyga <Pawel.Solyga@gmail.com>
Mon, 18 May 2009 14:22:45 +0200
changeset 2320 5bf22cd44c19
parent 2319 3eee2308f1dd (diff)
parent 2308 c058fce7c0b4 (current diff)
child 2321 b8d460ebcddd
Merge with Sverre's accidental head.
--- a/.hgignore	Thu May 14 22:40:39 2009 +0200
+++ b/.hgignore	Mon May 18 14:22:45 2009 +0200
@@ -13,3 +13,4 @@
 tests/.coverage
 *.git
 .gitignore
+.DS_Store
--- a/AUTHORS	Thu May 14 22:40:39 2009 +0200
+++ b/AUTHORS	Mon May 18 14:22:45 2009 +0200
@@ -3,8 +3,8 @@
 # included, it must be RFC 2821 compliant (properly quoted, escaped, etc.).
 # Lines beginning with # can be stripped from the file, as can blank lines.
 
-"Madhusudan.C.S" <madhusudancs@gmail.com>
 "Haoyu Bai" <baihaoyu@gmail.com>
+"Madhusudan C.S" <madhusudancs@gmail.com>
 "Augie Fackler" <durin42@gmail.com>
 "Mario Ferraro" <fadinlight@gmail.com>
 "Dmitri Gaskin" <dmitrig01@gmail.com>
--- a/app/soc/logic/helper/notifications.py	Thu May 14 22:40:39 2009 +0200
+++ b/app/soc/logic/helper/notifications.py	Mon May 18 14:22:45 2009 +0200
@@ -191,6 +191,7 @@
       'scope_path': to_user.link_id
   }
 
+  import soc.logic.models.notification
   key_name = model_logic.notification.logic.getKeyNameFromFields(fields)
 
   # create and put a new notification in the datastore
@@ -204,6 +205,8 @@
       notification_entity: Notification about which the message should be sent
   """
 
+  import soc.views.models.notification
+
   # create the url to show this notification
   notification_url = "http://%(host)s%(index)s" % {
       'host' : os.environ['HTTP_HOST'],
--- a/app/soc/views/helper/lists.py	Thu May 14 22:40:39 2009 +0200
+++ b/app/soc/views/helper/lists.py	Mon May 18 14:22:45 2009 +0200
@@ -22,6 +22,7 @@
   '"Pawel Solyga" <pawel.solyga@gmail.com>',
   ]
 
+import logging
 
 from soc.logic import dicts
 from soc.logic.models.user import logic as user_logic
@@ -60,8 +61,6 @@
 
 OFFSET_KEY = 'offset_%d'
 LIMIT_KEY = 'limit_%d'
-OFFSET_LINKID_KEY = 'offset_linkid_%d'
-REVERSE_DIRECTION_KEY = 'reverse_sort_direction_%d'
 
 
 def makeOffsetKey(limit_idx):
@@ -72,14 +71,6 @@
   return LIMIT_KEY % limit_idx
 
 
-def makeOffsetLinkidKey(limit_idx):
-  return OFFSET_LINKID_KEY % limit_idx
-
-
-def makeReverseDirectionKey(limit_idx):
-  return REVERSE_DIRECTION_KEY % limit_idx
-
-
 def getListParameters(request, list_index):
   """Retrieves, converts and validates values for one list
 
@@ -119,44 +110,30 @@
   else:
     limit = min(DEF_MAX_PAGINATION, limit)
 
-  result = dict(limit=limit, offset=offset)
-  offset_linkid = request.GET.get(makeOffsetLinkidKey(list_index),
-                                  '')
-  # TODO(dbentley): URL unescape
-  result['offset_linkid'] = offset_linkid
-
-  reverse_direction = makeReverseDirectionKey(list_index) in request.GET
-  result['reverse_direction'] = reverse_direction
-
-  return result
+  return dict(limit=limit, offset=offset)
 
 
-class LinkCreator(object):
-  """A way to create links for a page.
+def generateLinkFromGetArgs(request, offset_and_limits):
+  """Constructs the get args for the url.
   """
-  def __init__(self, request, list_idx, limit):
-    self.path = request.path
-    self.base_params = dict(
-        i for i in request.GET.iteritems() if
-        i[0].startswith('offset_') or i[0].startswith('limit_'))
-    self.idx = list_idx
-    self.base_params[makeLimitKey(self.idx)] = limit
+
+  args = ["%s=%s" % (k, v) for k, v in offset_and_limits.iteritems()]
+  link_suffix = '?' + '&'.join(args)
+
+  return request.path + link_suffix
+
 
-  def create(self, offset_linkid=None, export=False, reverse_direction=False):
-    params = self.base_params.copy()
-    if offset_linkid is not None:
-      # TODO(dbentley): URL encode
-      if offset_linkid == '':
-        try:
-          del params[makeOffsetLinkidKey(self.idx)]
-        except KeyError:
-          pass
-      else:
-        params[makeOffsetLinkidKey(self.idx)]=offset_linkid
-    if reverse_direction:
-      params[makeReverseDirectionKey(self.idx)]=True
-    link_suffix = '&'.join('%s=%s' % (k, v) for k, v in params.iteritems())
-    return '%s?%s' % (self.path, link_suffix)
+def generateLinkForRequest(request, base_params, updated_params):
+  """Create a link to the same page as request but with different params
+
+  Params:
+    request: the request for the page
+    base_params: the base parameters
+    updated_params: the parameters to update
+  """
+  params = base_params.copy()
+  params.update(updated_params)
+  return generateLinkFromGetArgs(request, params)
 
 
 def getListContent(request, params, filter=None, order=None,
@@ -197,36 +174,13 @@
   # as we only use this logic for getForFields, which is never overridden
   logic = params['logic']
 
-  limit_key = makeLimitKey(idx)
-  # offset_key = makeOffsetKey(idx)
-  # offset_linkid_key = makeOffsetLinkidKey(idx) 
-  # reverse_direction_key = makeReverseDirectionKey(idx)
+  limit_key, offset_key = makeLimitKey(idx), makeOffsetKey(idx)
 
   list_params = getListParameters(request, idx)
   limit, offset = list_params['limit'], list_params['offset']
-  offset_linkid = list_params['offset_linkid']
-  reverse_direction = list_params['reverse_direction']
   pagination_form = makePaginationForm(request, list_params['limit'],
                                        limit_key)
 
-  if offset_linkid:
-    if filter is None:
-      filter = {}
-
-    if reverse_direction:
-      filter['link_id <'] = offset_linkid
-    else:
-      filter['link_id >'] = offset_linkid
-
-    if order is None:
-      order = []
-    if reverse_direction:
-      order.append('-link_id')
-    else:
-      order.append('link_id')
-
-
-
   # Fetch one more to see if there should be a 'next' link
   data = logic.getForFields(filter=filter, limit=limit+1, offset=offset,
                             order=order)
@@ -235,61 +189,46 @@
     return None
 
   more = len(data) > limit
-  if reverse_direction:
-    data.reverse()
+
+  if more:
+    del data[limit:]
+
+  newest = next = prev = export_link = ''
+
+  base_params = dict(i for i in request.GET.iteritems() if
+                     i[0].startswith('offset_') or i[0].startswith('limit_'))
+
+  if params.get('list_key_order'):
+    export_link = generateLinkForRequest(request, base_params, {'export' : idx})
 
   if more:
-    if reverse_direction:
-      data = data[1:]
-    else:
-      data = data[:limit]
-
-  should_have_next_link = True
-  if not reverse_direction and not more:
-    should_have_next_link = False
+    # TODO(dbentley): here we need to implement a new field "last_key"
+    next = generateLinkForRequest(request, base_params, {offset_key : offset+limit,
+                                                         limit_key : limit})
 
-  # Calculating should_have_previous_link is tricky. It's possible we could
-  # be creating a previous link to a page that would have 0 entities.
-  # That would be suboptimal; what's a better way?
-  should_have_previous_link = False
-  if offset_linkid:
-    should_have_previous_link = True
-  if reverse_direction and not more:
-    should_have_previous_link = False
+  if offset > 0:
+    # TODO(dbentley): here we need to implement previous in the good way.
+    prev = generateLinkForRequest(request, base_params,
+                                  { offset_key : max(0, offset-limit),
+                                    limit_key : limit })
 
-  if data:
-    first_displayed_item = data[0]
-    last_displayed_item = data[-1]
-  else:
-    class Dummy(object):
-      pass
-    first_displayed_item = last_displayed_item = Dummy()
-    first_displayed_item.link_id = None
-  newest = next = prev = export_link = ''
-
-  link_creator = LinkCreator(request, idx, limit)
+  if offset > limit:
+    # Having a link to the first doesn't make sense on the first page (we're on
+    # it).  It also doesn't make sense on the second page (because the first
+    # page is the previous page).
 
-  if params.get('list_key_order'):
-    export_link = link_creator.create(export=True)
-
-  if should_have_next_link:
-    next = link_creator.create(offset_linkid=last_displayed_item.link_id)
-
-  if should_have_previous_link:
-    prev = link_creator.create(offset_linkid=first_displayed_item.link_id,
-                               reverse_direction=True)
-
-  newest = link_creator.create(offset_linkid='')
-
-  # TODO(dbentley): add a "last" link (which is now possible because we can
-  # query with a reverse linkid sorting
+    # NOTE(dbentley): I personally disagree that it's simpler to do that way,
+    # because sometimes you want to go to the first page without having to
+    # consider what page you're on now.
+    newest = generateLinkForGetArgs(request, base_params, {offset_key : 0,
+                                                           limit_key : limit})
 
   content = {
       'idx': idx,
       'data': data,
       'export': export_link,
-      'first': first_displayed_item.link_id,
-      'last': last_displayed_item.link_id,
+      'first': offset+1,
+      'last': len(data) > 1 and offset+len(data) or None,
       'logic': logic,
       'limit': limit,
       'newest': newest,
--- a/tests/run.py	Thu May 14 22:40:39 2009 +0200
+++ b/tests/run.py	Mon May 18 14:22:45 2009 +0200
@@ -9,6 +9,7 @@
                os.path.join(appengine_location, 'lib', 'django'),
                os.path.join(appengine_location, 'lib', 'webob'),
                os.path.join(appengine_location, 'lib', 'yaml', 'lib'),
+               os.path.join(appengine_location, 'lib', 'antlr3'),
                appengine_location,
                os.path.join(HERE, 'app'),
                os.path.join(HERE, 'thirdparty', 'coverage'),
@@ -32,7 +33,9 @@
   def afterTest(self, test):
     from google.appengine.api import apiproxy_stub_map
     datastore = apiproxy_stub_map.apiproxy.GetStub('datastore')
-    datastore.Clear()
+    # clear datastore iff one is available
+    if datastore is not None:
+      datastore.Clear()
 
 
 def main():
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test_functional.py	Mon May 18 14:22:45 2009 +0200
@@ -0,0 +1,100 @@
+#!/usr/bin/python2.5
+#
+# Copyright 2009 the Melange authors.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+__authors__ = [
+  '"Matthew Wilkes" <matthew@matthewwilkes.co.uk>',
+  ]
+
+
+from gaeftest.test import FunctionalTestCase
+
+from zope.testbrowser import browser
+
+import os.path
+
+
+class MelangeFunctionalTestCase(FunctionalTestCase):
+  """A base class for all functional tests in Melange.
+
+  Tests MUST NOT be defined here, but the superclass requires a path
+  attribute that points to the app.yaml.  Utility functions MAY be
+  declared here to be shared by all functional tests, but any
+  overridden unittest methods MUST call the superclass version.
+  """
+
+  path = os.path.abspath(__file__+"/../../app/app.yaml")
+
+
+class TestBranding(MelangeFunctionalTestCase):
+  """Tests that ensure Melange properly displays attribution.
+
+  Other notices, as required by the project and/or law, are tested
+  here as well.
+  """
+
+  def test_attribution(self):
+    """Ensure that the front page asserts that it is a Melange app.
+    """
+
+    tb = browser.Browser()
+    tb.open("http://127.0.0.1:8080/site/show/site")
+
+    self.assertTrue("Powered by Melange" in tb.contents)
+
+
+class TestLogin(MelangeFunctionalTestCase):
+  """Tests that check the login system is functioning correctly.
+
+  Also tests that users go through the correct registration workflow.
+  """
+
+  def test_firstLogin(self):
+    """Ensure that new users are prompted to create a profile.
+
+    Also test that only new users are prompted.
+    """
+
+    tb = browser.Browser()
+    tb.open("http://127.0.0.1:8080")
+
+    tb.getLink("Sign in").click()
+    self.assertTrue("login" in tb.url)
+
+    # fill in dev_appserver login form
+    tb.getForm().getControl("Email").value = "newuser@example.com"
+    tb.getForm().getControl("Login").click()
+
+    self.assertTrue(tb.url.endswith("/show/site"))
+    self.assertTrue('Please create <a href="/user/create_profile">'
+        'User Profile</a> in order to view this page' in tb.contents)
+
+    tb.getLink("User Profile").click()
+
+    # fill in the user profile
+    cp = tb.getForm(action="create_profile")
+    cp.getControl(name="link_id").value = "exampleuser"
+    cp.getControl(name="name").value = "Example user"
+    cp.getControl("Save").click()
+
+    # if all is well, we go to the edit page
+    self.assertTrue("edit_profile" in tb.url)
+
+    tb.open("http://127.0.0.1:8080")
+
+    # call to action no longer on front page
+    self.assertFalse('Please create <a href="/user/create_profile">'
+        'User Profile</a> in order to view this page' in tb.contents)
\ No newline at end of file
--- a/thirdparty/google_appengine/RELEASE_NOTES	Thu May 14 22:40:39 2009 +0200
+++ b/thirdparty/google_appengine/RELEASE_NOTES	Mon May 18 14:22:45 2009 +0200
@@ -3,6 +3,40 @@
 
 App Engine Python SDK - Release Notes
 
+Version 1.2.2 - April 22, 2009
+==============================
+
+  - New quota API which returns the CPU usage of the current request.
+      from google.appengine.api import quota
+      cpu_usage_so_far = quota.get_request_cpu_usage()
+  - Urlfetch fetch now has support for user configurable deadlines.
+      http://code.google.com/p/googleappengine/issues/detail?id=79
+  - Urlfetch in the SDK allows the Accept-Encoding header to match App Engine.
+      http://code.google.com/p/googleappengine/issues/detail?id=1071
+  - urllib now supports HTTPS in addition to HTTP
+      http://code.google.com/p/googleappengine/issues/detail?id=1156
+  - Datastore indexes on single properties can now be disabled by setting
+    indexed=False on the property constructor.
+  - Datastore now supports Key-only queries, using either SELECT __key__ or
+    or db.Query(Model, keys_only=True)
+  - Fixed issues with Datastore IN filters and sorting: sort order is now
+    correct, and can be used with __key__.
+      http://code.google.com/p/googleappengine/issues/detail?id=1100
+      http://code.google.com/p/googleappengine/issues/detail?id=1016
+  - Cron supports additional time specification formats.
+      http://code.google.com/p/googleappengine/issues/detail?id=1261
+  - Fixed an issue in the dev_appserver admin console datastore viewer
+    (/_ah/admin/datastore) with sorting columns containing None types.
+      http://code.google.com/p/googleappengine/issues/detail?id=1007
+  - Bulk Loader improvements:  New appcfg download_data command.
+    Better backoff support and debugging output for long requests.
+  - New --vhost flag on appcfg.py request_logs command to select logs for
+    a particular host.
+  - Python _ast module is now available for import
+      http://code.google.com/p/googleappengine/issues/detail?id=779
+  - Fixed issue with the color argument of the Images API composite method.
+
+
 Version 1.2.1 - April 13, 2009
 =============================
 
@@ -33,12 +67,12 @@
       http://code.google.com/p/googleappengine/issues/detail?id=1017
 
 
-
 Version 1.2.0 - March 24, 2009
 ==============================
   - Cron support. Appcfg.py will upload the schedule to App Engine.
       The dev_appserver console at /_ah/admin describes your schedule but does
       not automatically run scheduled jobs. Learn more at
+      http://code.google.com/appengine/docs/python/config/cron.html
   - New allow_skipped_files flag in dev_appserver to allow it to read files
     which are not available in App Engine.
       http://code.google.com/p/googleappengine/issues/detail?id=550
--- a/thirdparty/google_appengine/VERSION	Thu May 14 22:40:39 2009 +0200
+++ b/thirdparty/google_appengine/VERSION	Mon May 18 14:22:45 2009 +0200
@@ -1,3 +1,3 @@
-release: "1.2.1"
-timestamp: 1238791978
+release: "1.2.2"
+timestamp: 1240438569
 api_versions: ['1']
--- a/thirdparty/google_appengine/dev_appserver.py	Thu May 14 22:40:39 2009 +0200
+++ b/thirdparty/google_appengine/dev_appserver.py	Mon May 18 14:22:45 2009 +0200
@@ -21,21 +21,17 @@
 import sys
 
 if not hasattr(sys, 'version_info'):
-  sys.stderr.write('Error: Very old versions of Python are not supported. Please '
-                   'use version 2.5.\n')
+  sys.stderr.write('Very old versions of Python are not supported. Please '
+                   'use version 2.5 or greater.\n')
   sys.exit(1)
 version_tuple = tuple(sys.version_info[:2])
 if version_tuple < (2, 4):
   sys.stderr.write('Error: Python %d.%d is not supported. Please use '
-                   'version 2.5.\n' % version_tuple)
+                   'version 2.5 or greater.\n' % version_tuple)
   sys.exit(1)
 if version_tuple == (2, 4):
   sys.stderr.write('Warning: Python 2.4 is not supported; this program may '
-                   'break. Please use version 2.5.\n')
-if version_tuple > (2, 5):
-  sys.stderr.write('Error: Python %d.%d and is not supported; '
-		   'Please use version 2.5, not greater.\n' % version_tuple)
-  sys.exit(1)
+                   'break. Please use version 2.5 or greater.\n')
 
 DIR_PATH = os.path.abspath(os.path.dirname(os.path.realpath(__file__)))
 SCRIPT_DIR = os.path.join(DIR_PATH, 'google', 'appengine', 'tools')
--- a/thirdparty/google_appengine/google/appengine/api/apiproxy_rpc.py	Thu May 14 22:40:39 2009 +0200
+++ b/thirdparty/google_appengine/google/appengine/api/apiproxy_rpc.py	Mon May 18 14:22:45 2009 +0200
@@ -89,11 +89,12 @@
     assert self.__state is RPC.IDLE, ('RPC for %s.%s has already been started' %
                                       (self.package, self.call))
     assert self.callback is None or callable(self.callback)
-
     self._MakeCallImpl()
 
   def Wait(self):
     """Waits on the API call associated with this RPC."""
+    assert self.__state is not RPC.IDLE, ('RPC for %s.%s has not been started' %
+                                          (self.package, self.call))
     rpc_completed = self._WaitImpl()
 
     assert rpc_completed, ('RPC for %s.%s was not completed, and no other ' +
--- a/thirdparty/google_appengine/google/appengine/api/datastore.py	Thu May 14 22:40:39 2009 +0200
+++ b/thirdparty/google_appengine/google/appengine/api/datastore.py	Mon May 18 14:22:45 2009 +0200
@@ -274,7 +274,8 @@
   Includes read-only accessors for app id, kind, and primary key. Also
   provides dictionary-style access to properties.
   """
-  def __init__(self, kind, parent=None, _app=None, name=None):
+  def __init__(self, kind, parent=None, _app=None, name=None,
+               unindexed_properties=[]):
     """Constructor. Takes the kind and transaction root, which cannot be
     changed after the entity is constructed, and an optional parent. Raises
     BadArgumentError or BadKeyError if kind is invalid or parent is not an
@@ -287,6 +288,9 @@
       parent: Entity or Key
       # if provided, this entity's name.
       name: string
+      # if provided, a sequence of property names that should not be indexed
+      # by the built-in single property indices.
+      unindexed_properties: list or tuple of strings
     """
     ref = entity_pb.Reference()
     _app = datastore_types.ResolveAppId(_app)
@@ -311,6 +315,15 @@
         raise datastore_errors.BadValueError('name cannot begin with a digit')
       last_path.set_name(name.encode('utf-8'))
 
+    unindexed_properties, multiple = NormalizeAndTypeCheck(unindexed_properties, basestring)
+    if not multiple:
+      raise datastore_errors.BadArgumentError(
+        'unindexed_properties must be a sequence; received %s (a %s).' %
+        (unindexed_properties, typename(unindexed_properties)))
+    for prop in unindexed_properties:
+      datastore_types.ValidateProperty(prop, None)
+    self.__unindexed_properties = frozenset(unindexed_properties)
+
     self.__key = Key._FromPb(ref)
 
   def app(self):
@@ -336,13 +349,17 @@
     return self.key().parent()
 
   def entity_group(self):
-    """Returns this entitys's entity group as a Key.
+    """Returns this entity's entity group as a Key.
 
     Note that the returned Key will be incomplete if this is a a root entity
     and its key is incomplete.
     """
     return self.key().entity_group()
 
+  def unindexed_properties(self):
+    """Returns this entity's unindexed properties, as a frozenset of strings."""
+    return self.__unindexed_properties
+
   def __setitem__(self, name, value):
     """Implements the [] operator. Used to set property value(s).
 
@@ -492,7 +509,8 @@
       if isinstance(sample, list):
         sample = values[0]
 
-      if isinstance(sample, datastore_types._RAW_PROPERTY_TYPES):
+      if (isinstance(sample, datastore_types._RAW_PROPERTY_TYPES) or
+          name in self.__unindexed_properties):
         pb.raw_property_list().extend(properties)
       else:
         pb.property_list().extend(properties)
@@ -530,7 +548,10 @@
       assert last_path.has_name()
       assert last_path.name()
 
-    e = Entity(unicode(last_path.type().decode('utf-8')))
+    unindexed_properties = [p.name() for p in pb.raw_property_list()]
+
+    e = Entity(unicode(last_path.type().decode('utf-8')),
+               unindexed_properties=unindexed_properties)
     ref = e.__key._Key__reference
     ref.CopyFrom(pb.key())
 
@@ -538,11 +559,6 @@
 
     for prop_list in (pb.property_list(), pb.raw_property_list()):
       for prop in prop_list:
-        if not prop.has_multiple():
-          raise datastore_errors.Error(
-            'Property %s is corrupt in the datastore; it\'s missing the '
-            'multiple valued field.' % prop.name())
-
         try:
           value = datastore_types.FromPropertyPb(prop)
         except (AssertionError, AttributeError, TypeError, ValueError), e:
@@ -684,7 +700,7 @@
   __inequality_prop = None
   __inequality_count = 0
 
-  def __init__(self, kind, filters={}, _app=None):
+  def __init__(self, kind, filters={}, _app=None, keys_only=False):
     """Constructor.
 
     Raises BadArgumentError if kind is not a string. Raises BadValueError or
@@ -692,9 +708,10 @@
 
     Args:
       # kind is required. filters is optional; if provided, it's used
-      # as an initial set of property filters.
+      # as an initial set of property filters. keys_only defaults to False.
       kind: string
       filters: dict
+      keys_only: boolean
     """
     datastore_types.ValidateString(kind, 'kind',
                                    datastore_errors.BadArgumentError)
@@ -705,6 +722,7 @@
     self.update(filters)
 
     self.__app = datastore_types.ResolveAppId(_app)
+    self.__keys_only = keys_only
 
   def Order(self, *orderings):
     """Specify how the query results should be sorted.
@@ -847,6 +865,10 @@
     self.__ancestor.CopyFrom(key._Key__reference)
     return self
 
+  def IsKeysOnly(self):
+    """Returns True if this query is keys only, false otherwise."""
+    return self.__keys_only
+
   def Run(self):
     """Runs this query.
 
@@ -890,7 +912,7 @@
         raise datastore_errors.NeedIndexError(
           str(exc) + '\nThis query needs this index:\n' + yaml)
 
-    return Iterator._FromPb(result.cursor())
+    return Iterator._FromPb(result)
 
   def Get(self, limit, offset=0):
     """Fetches and returns a maximum number of results from the query.
@@ -1120,6 +1142,7 @@
     pb = datastore_pb.Query()
 
     pb.set_kind(self.__kind.encode('utf-8'))
+    pb.set_keys_only(bool(self.__keys_only))
     if self.__app:
       pb.set_app(self.__app.encode('utf-8'))
     if limit is not None:
@@ -1171,6 +1194,11 @@
 
   This class is actually a subclass of datastore.Query as it is intended to act
   like a normal Query object (supporting the same interface).
+
+  Does not support keys only queries, since it needs whole entities in order
+  to merge sort them. (That's not true if there are no sort orders, or if the
+  sort order is on __key__, but allowing keys only queries in those cases, but
+  not in others, would be confusing.)
   """
 
   def __init__(self, bound_queries, orderings):
@@ -1179,6 +1207,12 @@
           'Cannot satisfy query -- too many subqueries (max: %d, got %d).'
           ' Probable cause: too many IN/!= filters in query.' %
           (MAX_ALLOWABLE_QUERIES, len(bound_queries)))
+
+    for query in bound_queries:
+      if query.IsKeysOnly():
+        raise datastore_errors.BadQueryError(
+            'MultiQuery does not support keys_only.')
+
     self.__bound_queries = bound_queries
     self.__orderings = orderings
 
@@ -1294,7 +1328,7 @@
       return 0
 
     def __GetValueForId(self, sort_order_entity, identifier, sort_order):
-      value = sort_order_entity.__entity[identifier]
+      value = _GetPropertyValue(sort_order_entity.__entity, identifier)
       entity_key = sort_order_entity.__entity.key()
       if (entity_key, identifier) in self.__min_max_value_cache:
         value = self.__min_max_value_cache[(entity_key, identifier)]
@@ -1479,10 +1513,11 @@
   > for person in it:
   >   print 'Hi, %s!' % person['name']
   """
-  def __init__(self, cursor):
+  def __init__(self, cursor, keys_only=False):
     self.__cursor = cursor
     self.__buffer = []
     self.__more_results = True
+    self.__keys_only = keys_only
 
   def _Next(self, count):
     """Returns the next result(s) of the query.
@@ -1490,31 +1525,29 @@
     Not intended to be used by application developers. Use the python
     iterator protocol instead.
 
-    This method returns the next entities from the list of resulting
-    entities that matched the query. If the query specified a sort
-    order, entities are returned in that order. Otherwise, the order
-    is undefined.
+    This method returns the next entities or keys from the list of matching
+    results. If the query specified a sort order, results are returned in that
+    order. Otherwise, the order is undefined.
 
-    The argument specifies the number of entities to return. If it's
-    greater than the number of remaining entities, all of the
-    remaining entities are returned. In that case, the length of the
-    returned list will be smaller than count.
+    The argument specifies the number of results to return. If it's greater
+    than the number of remaining results, all of the remaining results are
+    returned. In that case, the length of the returned list will be smaller
+    than count.
 
-    There is an internal buffer for use with the next() method.  If
-    this buffer is not empty, up to 'count' values are removed from
-    this buffer and returned.  It's best not to mix _Next() and
-    next().
+    There is an internal buffer for use with the next() method. If this buffer
+    is not empty, up to 'count' values are removed from this buffer and
+    returned. It's best not to mix _Next() and next().
 
-    The results are always returned as a list. If there are no results
-    left, an empty list is returned.
+    The results are always returned as a list. If there are no results left,
+    an empty list is returned.
 
     Args:
-      # the number of entities to return; must be >= 1
+      # the number of results to return; must be >= 1
       count: int or long
 
     Returns:
-      # a list of entities
-      [Entity, ...]
+      # a list of entities or keys
+      [Entity or Key, ...]
     """
     if not isinstance(count, (int, long)) or count <= 0:
       raise datastore_errors.BadArgumentError(
@@ -1539,8 +1572,10 @@
 
     self.__more_results = result.more_results()
 
-    ret = [Entity._FromPb(r) for r in result.result_list()]
-    return ret
+    if self.__keys_only:
+      return [Key._FromPb(e.key()) for e in result.result_list()]
+    else:
+      return [Entity._FromPb(e) for e in result.result_list()]
 
   _BUFFER_SIZE = 20
 
@@ -1570,18 +1605,16 @@
   @staticmethod
   def _FromPb(pb):
     """Static factory method. Returns the Iterator representation of the given
-    protocol buffer (datastore_pb.Cursor). Not intended to be used by
-    application developers. Enforced by not hiding the datastore_pb classes.
+    protocol buffer (datastore_pb.QueryResult). Not intended to be used by
+    application developers. Enforced by hiding the datastore_pb classes.
 
     Args:
-      # a protocol buffer Cursor
-      pb: datastore_pb.Cursor
+      pb: datastore_pb.QueryResult
 
     Returns:
-      # the Iterator representation of the argument
       Iterator
     """
-    return Iterator(pb.cursor())
+    return Iterator(pb.cursor().cursor(), keys_only=pb.keys_only())
 
 
 class _Transaction(object):
@@ -1920,6 +1953,28 @@
   return key
 
 
+def _GetPropertyValue(entity, property):
+  """Returns an entity's value for a given property name.
+
+  Handles special properties like __key__ as well as normal properties.
+
+  Args:
+    entity: datastore.Entity
+    property: str; the property name
+
+  Returns:
+    property value. For __key__, a datastore_types.Key.
+
+  Raises:
+    KeyError, if the entity does not have the given property.
+  """
+  if property in datastore_types._SPECIAL_PROPERTIES:
+    assert property == datastore_types._KEY_SPECIAL_PROPERTY
+    return entity.key()
+  else:
+    return entity[property]
+
+
 def _AddOrAppend(dictionary, key, value):
   """Adds the value to the existing values in the dictionary, if any.
 
--- a/thirdparty/google_appengine/google/appengine/api/datastore_file_stub.py	Thu May 14 22:40:39 2009 +0200
+++ b/thirdparty/google_appengine/google/appengine/api/datastore_file_stub.py	Mon May 18 14:22:45 2009 +0200
@@ -98,6 +98,44 @@
     self.native = datastore.Entity._FromPb(entity)
 
 
+class _Cursor(object):
+  """A query cursor.
+
+  Public properties:
+    cursor: the integer cursor
+    count: the original total number of results
+    keys_only: whether the query is keys_only
+  """
+  def __init__(self, results, keys_only):
+    """Constructor.
+
+    Args:
+      # the query results, in order, such that pop(0) is the next result
+      results: list of entity_pb.EntityProto
+      keys_only: integer
+    """
+    self.__results = results
+    self.count = len(results)
+    self.keys_only = keys_only
+    self.cursor = id(self)
+
+  def PopulateQueryResult(self, result, count):
+    """Populates a QueryResult with this cursor and the given number of results.
+
+    Args:
+      result: datastore_pb.QueryResult
+      count: integer
+    """
+    result.mutable_cursor().set_cursor(self.cursor)
+    result.set_keys_only(self.keys_only)
+
+    results_pbs = [r._ToPb() for r in self.__results[:count]]
+    result.result_list().extend(results_pbs)
+    del self.__results[:count]
+
+    result.set_more_results(len(self.__results) > 0)
+
+
 class DatastoreFileStub(apiproxy_stub.APIProxyStub):
   """ Persistent stub for the Python datastore API.
 
@@ -189,11 +227,9 @@
     self.__query_history = {}
 
     self.__next_id = 1
-    self.__next_cursor = 1
     self.__next_tx_handle = 1
     self.__next_index_id = 1
     self.__id_lock = threading.Lock()
-    self.__cursor_lock = threading.Lock()
     self.__tx_handle_lock = threading.Lock()
     self.__index_id_lock = threading.Lock()
     self.__tx_lock = threading.Lock()
@@ -581,6 +617,22 @@
                  datastore_pb.Query_Filter.EQUAL:                 '==',
                  }
 
+    def has_prop_indexed(entity, prop):
+      """Returns True if prop is in the entity and is indexed."""
+      if prop in datastore_types._SPECIAL_PROPERTIES:
+        return True
+      elif prop in entity.unindexed_properties():
+        return False
+
+      values = entity.get(prop, [])
+      if not isinstance(values, (tuple, list)):
+        values = [values]
+
+      for value in values:
+        if type(value) not in datastore_types._RAW_PROPERTY_TYPES:
+          return True
+      return False
+
     for filt in query.filter_list():
       assert filt.op() != datastore_pb.Query_Filter.IN
 
@@ -590,20 +642,24 @@
       filter_val_list = [datastore_types.FromPropertyPb(filter_prop)
                          for filter_prop in filt.property_list()]
 
-      def passes(entity):
-        """ Returns True if the entity passes the filter, False otherwise. """
-        if prop in datastore_types._SPECIAL_PROPERTIES:
-          entity_vals = self.__GetSpecialPropertyValue(entity, prop)
-        else:
-          entity_vals = entity.get(prop, [])
+      def passes_filter(entity):
+        """Returns True if the entity passes the filter, False otherwise.
+
+        The filter being evaluated is filt, the current filter that we're on
+        in the list of filters in the query.
+        """
+        if not has_prop_indexed(entity, prop):
+          return False
+
+        try:
+          entity_vals = datastore._GetPropertyValue(entity, prop)
+        except KeyError:
+          entity_vals = []
 
         if not isinstance(entity_vals, list):
           entity_vals = [entity_vals]
 
         for fixed_entity_val in entity_vals:
-          if type(fixed_entity_val) in datastore_types._RAW_PROPERTY_TYPES:
-            continue
-
           for filter_val in filter_val_list:
             fixed_entity_type = self._PROPERTY_TYPE_TAGS.get(
               fixed_entity_val.__class__)
@@ -627,22 +683,7 @@
 
         return False
 
-      results = filter(passes, results)
-
-    def has_prop_indexed(entity, prop):
-      """Returns True if prop is in the entity and is not a raw property, or
-      is a special property."""
-      if prop in datastore_types._SPECIAL_PROPERTIES:
-        return True
-
-      values = entity.get(prop, [])
-      if not isinstance(values, (tuple, list)):
-        values = [values]
-
-      for value in values:
-        if type(value) not in datastore_types._RAW_PROPERTY_TYPES:
-          return True
-      return False
+      results = filter(passes_filter, results)
 
     for order in query.order_list():
       prop = order.property().decode('utf-8')
@@ -658,17 +699,13 @@
 
         reverse = (o.direction() is datastore_pb.Query_Order.DESCENDING)
 
-        if prop in datastore_types._SPECIAL_PROPERTIES:
-          a_val = self.__GetSpecialPropertyValue(a, prop)
-          b_val = self.__GetSpecialPropertyValue(b, prop)
-        else:
-          a_val = a[prop]
-          if isinstance(a_val, list):
-            a_val = sorted(a_val, order_compare_properties, reverse=reverse)[0]
+        a_val = datastore._GetPropertyValue(a, prop)
+        if isinstance(a_val, list):
+          a_val = sorted(a_val, order_compare_properties, reverse=reverse)[0]
 
-          b_val = b[prop]
-          if isinstance(b_val, list):
-            b_val = sorted(b_val, order_compare_properties, reverse=reverse)[0]
+        b_val = datastore._GetPropertyValue(b, prop)
+        if isinstance(b_val, list):
+          b_val = sorted(b_val, order_compare_properties, reverse=reverse)[0]
 
         cmped = order_compare_properties(a_val, b_val)
 
@@ -725,39 +762,27 @@
       self.__query_history[clone] = 1
     self.__WriteHistory()
 
-    self.__cursor_lock.acquire()
-    cursor = self.__next_cursor
-    self.__next_cursor += 1
-    self.__cursor_lock.release()
-    self.__queries[cursor] = (results, len(results))
-
-    query_result.mutable_cursor().set_cursor(cursor)
-    query_result.set_more_results(len(results) > 0)
+    cursor = _Cursor(results, query.keys_only())
+    self.__queries[cursor.cursor] = cursor
+    cursor.PopulateQueryResult(query_result, 0)
 
   def _Dynamic_Next(self, next_request, query_result):
-    cursor = next_request.cursor().cursor()
+    cursor_handle = next_request.cursor().cursor()
 
     try:
-      results, orig_count = self.__queries[cursor]
+      cursor = self.__queries[cursor_handle]
     except KeyError:
-      raise apiproxy_errors.ApplicationError(datastore_pb.Error.BAD_REQUEST,
-                                             'Cursor %d not found' % cursor)
+      raise apiproxy_errors.ApplicationError(
+          datastore_pb.Error.BAD_REQUEST, 'Cursor %d not found' % cursor_handle)
 
-    count = next_request.count()
-
-    results_pb = [r._ToPb() for r in results[:count]]
-    query_result.result_list().extend(results_pb)
-    del results[:count]
-
-    query_result.set_more_results(len(results) > 0)
+    cursor.PopulateQueryResult(query_result, next_request.count())
 
   def _Dynamic_Count(self, query, integer64proto):
     self.__ValidateAppId(query.app())
     query_result = datastore_pb.QueryResult()
     self._Dynamic_RunQuery(query, query_result)
     cursor = query_result.cursor().cursor()
-    results, count = self.__queries[cursor]
-    integer64proto.set_value(count)
+    integer64proto.set_value(self.__queries[cursor].count)
     del self.__queries[cursor]
 
   def _Dynamic_BeginTransaction(self, request, transaction):
@@ -945,23 +970,3 @@
           return stored_index
 
     return None
-
-  @classmethod
-  def __GetSpecialPropertyValue(cls, entity, property):
-    """Returns an entity's value for a special property.
-
-    Right now, the only special property is __key__, whose value is the
-    entity's key.
-
-    Args:
-      entity: datastore.Entity
-
-    Returns:
-      property value. For __key__, a datastore_types.Key.
-
-    Raises:
-      AssertionError, if the given property is not special.
-    """
-    assert property in datastore_types._SPECIAL_PROPERTIES
-    if property == datastore_types._KEY_SPECIAL_PROPERTY:
-      return entity.key()
--- a/thirdparty/google_appengine/google/appengine/api/images/images_stub.py	Thu May 14 22:40:39 2009 +0200
+++ b/thirdparty/google_appengine/google/appengine/api/images/images_stub.py	Mon May 18 14:22:45 2009 +0200
@@ -36,6 +36,22 @@
 from google.appengine.runtime import apiproxy_errors
 
 
+def _ArgbToRgbaTuple(argb):
+  """Convert from a single ARGB value to a tuple containing RGBA.
+
+  Args:
+    argb: Signed 32 bit integer containing an ARGB value.
+
+  Returns:
+    RGBA tuple.
+  """
+  unsigned_argb = argb % 0x100000000
+  return ((unsigned_argb >> 16) & 0xFF,
+          (unsigned_argb >> 8) & 0xFF,
+          unsigned_argb & 0xFF,
+          (unsigned_argb >> 24) & 0xFF)
+
+
 class ImagesServiceStub(apiproxy_stub.APIProxyStub):
   """Stub version of images API to be used with the dev_appserver."""
 
@@ -60,10 +76,8 @@
     """
     width = request.canvas().width()
     height = request.canvas().height()
-    color = request.canvas().color() % 0x100000000
-    reordered_color = int((color & 0xff000000) | ((color >> 16) & 0xff) |
-                          (color & 0xff00) | (color & 0xff) << 16)
-    canvas = Image.new("RGBA", (width, height), reordered_color)
+    color = _ArgbToRgbaTuple(request.canvas().color())
+    canvas = Image.new("RGBA", (width, height), color)
     sources = []
     if (not request.canvas().width() or request.canvas().width() > 4000 or
         not request.canvas().height() or request.canvas().height() > 4000):
--- a/thirdparty/google_appengine/google/appengine/api/mail_service_pb.py	Thu May 14 22:40:39 2009 +0200
+++ b/thirdparty/google_appengine/google/appengine/api/mail_service_pb.py	Mon May 18 14:22:45 2009 +0200
@@ -22,7 +22,7 @@
 __pychecker__ = """maxreturns=0 maxbranches=0 no-callinit
                    unusednames=printElemNumber,debug_strs no-special"""
 
-from google.appengine.api.api_base_pb import VoidProto
+from google.appengine.api.api_base_pb import *
 class MailServiceError(ProtocolBuffer.ProtocolMessage):
 
   OK           =    0
--- a/thirdparty/google_appengine/google/appengine/api/memcache/memcache_service_pb.py	Thu May 14 22:40:39 2009 +0200
+++ b/thirdparty/google_appengine/google/appengine/api/memcache/memcache_service_pb.py	Mon May 18 14:22:45 2009 +0200
@@ -22,7 +22,7 @@
 __pychecker__ = """maxreturns=0 maxbranches=0 no-callinit
                    unusednames=printElemNumber,debug_strs no-special"""
 
-from google.appengine.api.api_base_pb import VoidProto
+from google.appengine.api.api_base_pb import *
 class MemcacheServiceError(ProtocolBuffer.ProtocolMessage):
 
   OK           =    0
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/thirdparty/google_appengine/google/appengine/api/quota.py	Mon May 18 14:22:45 2009 +0200
@@ -0,0 +1,39 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""Access to quota usage for this application."""
+
+
+
+
+try:
+  from google3.apphosting.runtime import _apphosting_runtime___python__apiproxy
+except ImportError:
+  _apphosting_runtime___python__apiproxy = None
+
+def get_request_cpu_usage():
+  """Get the amount of CPU used so far for the current request.
+
+  Returns the number of megacycles used so far for the current
+  request. Does not include CPU used by API calls.
+
+  Does nothing when used in the dev_appserver.
+  """
+
+  if _apphosting_runtime___python__apiproxy:
+    return _apphosting_runtime___python__apiproxy.get_request_cpu_usage()
+  return 0
--- a/thirdparty/google_appengine/google/appengine/api/urlfetch.py	Thu May 14 22:40:39 2009 +0200
+++ b/thirdparty/google_appengine/google/appengine/api/urlfetch.py	Mon May 18 14:22:45 2009 +0200
@@ -30,6 +30,7 @@
 import urllib2
 import urlparse
 
+from google.appengine.api import apiproxy_rpc
 from google.appengine.api import apiproxy_stub_map
 from google.appengine.api import urlfetch_service_pb
 from google.appengine.api.urlfetch_errors import *
@@ -186,13 +187,29 @@
   return False
 
 
+def __create_rpc(deadline=None, callback=None):
+  """DO NOT USE.  WILL CHANGE AND BREAK YOUR CODE.
+
+  Creates an RPC object for use with the urlfetch API.
+
+  Args:
+    deadline: deadline in seconds for the operation.
+    callback: callable to invoke on completion.
+
+  Returns:
+    A _URLFetchRPC object.
+  """
+  return _URLFetchRPC(deadline, callback)
+
+
 def fetch(url, payload=None, method=GET, headers={}, allow_truncated=False,
-          follow_redirects=True):
+          follow_redirects=True, deadline=None):
   """Fetches the given HTTP URL, blocking until the result is returned.
 
   Other optional parameters are:
      method: GET, POST, HEAD, PUT, or DELETE
-     payload: POST or PUT payload (implies method is not GET, HEAD, or DELETE)
+     payload: POST or PUT payload (implies method is not GET, HEAD, or DELETE).
+       this is ignored if the method is not POST or PUT.
      headers: dictionary of HTTP headers to send with the request
      allow_truncated: if true, truncate large responses and return them without
        error. otherwise, ResponseTooLargeError will be thrown when a response is
@@ -204,6 +221,7 @@
        information.  If false, you see the HTTP response yourself,
        including the 'Location' header, and redirects are not
        followed.
+     deadline: deadline in seconds for the operation.
 
   We use a HTTP/1.1 compliant proxy to fetch the result.
 
@@ -218,73 +236,173 @@
   of the returned structure, so HTTP errors like 404 do not result in an
   exception.
   """
-  if isinstance(method, basestring):
-    method = method.upper()
-  method = _URL_STRING_MAP.get(method, method)
-  if method not in _VALID_METHODS:
-    raise InvalidMethodError('Invalid method %s.' % str(method))
+  rpc = __create_rpc(deadline=deadline)
+  rpc.make_call(url, payload, method, headers, follow_redirects)
+  return rpc.get_result(allow_truncated)
+
+
+class _URLFetchRPC(object):
+  """A RPC object that manages the urlfetch RPC.
+
+  Its primary functions are the following:
+  1. Convert error codes to the URLFetchServiceError namespace and raise them
+     when get_result is called.
+  2. Wrap the urlfetch response with a _URLFetchResult object.
+  """
 
-  if _is_fetching_self(url, method):
-    raise InvalidURLError("App cannot fetch the same URL as the one used for "
-                          "the request.")
+  def __init__(self, deadline=None, callback=None):
+    """Construct a new url fetch RPC.
 
-  request = urlfetch_service_pb.URLFetchRequest()
-  response = urlfetch_service_pb.URLFetchResponse()
-  request.set_url(url)
+    Args:
+      deadline: deadline in seconds for the operation.
+      callback: callable to invoke on completion.
+    """
+    self.__rpc = apiproxy_stub_map.CreateRPC('urlfetch')
+    self.__rpc.deadline = deadline
+    self.__rpc.callback = callback
+    self.__called_hooks = False
+
+  def make_call(self, url, payload=None, method=GET, headers={},
+                follow_redirects=True):
+    """Executes the RPC call to fetch a given HTTP URL.
 
-  if method == GET:
-    request.set_method(urlfetch_service_pb.URLFetchRequest.GET)
-  elif method == POST:
-    request.set_method(urlfetch_service_pb.URLFetchRequest.POST)
-  elif method == HEAD:
-    request.set_method(urlfetch_service_pb.URLFetchRequest.HEAD)
-  elif method == PUT:
-    request.set_method(urlfetch_service_pb.URLFetchRequest.PUT)
-  elif method == DELETE:
-    request.set_method(urlfetch_service_pb.URLFetchRequest.DELETE)
+    See urlfetch.fetch for a thorough description of arguments.
+    """
+    assert self.__rpc.state is apiproxy_rpc.RPC.IDLE
+    if isinstance(method, basestring):
+      method = method.upper()
+    method = _URL_STRING_MAP.get(method, method)
+    if method not in _VALID_METHODS:
+      raise InvalidMethodError('Invalid method %s.' % str(method))
+
+    if _is_fetching_self(url, method):
+      raise InvalidURLError("App cannot fetch the same URL as the one used for "
+                            "the request.")
+
+    self.__request = urlfetch_service_pb.URLFetchRequest()
+    self.__response = urlfetch_service_pb.URLFetchResponse()
+    self.__result = None
+    self.__request.set_url(url)
 
-  if payload and (method == POST or method == PUT):
-    request.set_payload(payload)
+    if method == GET:
+      self.__request.set_method(urlfetch_service_pb.URLFetchRequest.GET)
+    elif method == POST:
+      self.__request.set_method(urlfetch_service_pb.URLFetchRequest.POST)
+    elif method == HEAD:
+      self.__request.set_method(urlfetch_service_pb.URLFetchRequest.HEAD)
+    elif method == PUT:
+      self.__request.set_method(urlfetch_service_pb.URLFetchRequest.PUT)
+    elif method == DELETE:
+      self.__request.set_method(urlfetch_service_pb.URLFetchRequest.DELETE)
+
+    if payload and (method == POST or method == PUT):
+      self.__request.set_payload(payload)
+
+    for key, value in headers.iteritems():
+      header_proto = self.__request.add_header()
+      header_proto.set_key(key)
+      header_proto.set_value(str(value))
+
+    self.__request.set_followredirects(follow_redirects)
+    if self.__rpc.deadline:
+      self.__request.set_deadline(self.__rpc.deadline)
+
+    apiproxy_stub_map.apiproxy.GetPreCallHooks().Call(
+        'urlfetch', 'Fetch', self.__request, self.__response)
+    self.__rpc.MakeCall('urlfetch', 'Fetch', self.__request, self.__response)
 
-  for key, value in headers.iteritems():
-    header_proto = request.add_header()
-    header_proto.set_key(key)
-    header_proto.set_value(str(value))
+  def wait(self):
+    """Waits for the urlfetch RPC to finish.  Idempotent.
+    """
+    assert self.__rpc.state is not apiproxy_rpc.RPC.IDLE
+    if self.__rpc.state is apiproxy_rpc.RPC.RUNNING:
+      self.__rpc.Wait()
+
+  def check_success(self, allow_truncated=False):
+    """Check success and convert RPC exceptions to urlfetch exceptions.
+
+    This method waits for the RPC if it has not yet finished, and calls the
+    post-call hooks on the first invocation.
 
-  request.set_followredirects(follow_redirects)
+    Args:
+      allow_truncated: if False, an error is raised if the response was
+        truncated.
+
+    Raises:
+      InvalidURLError if the url was invalid.
+      DownloadError if there was a problem fetching the url.
+      ResponseTooLargeError if the response was either truncated (and
+        allow_truncated is false) or if it was too big for us to download.
+    """
+    assert self.__rpc.state is not apiproxy_rpc.RPC.IDLE
+    if self.__rpc.state is apiproxy_rpc.RPC.RUNNING:
+      self.wait()
 
-  try:
-    apiproxy_stub_map.MakeSyncCall('urlfetch', 'Fetch', request, response)
-  except apiproxy_errors.ApplicationError, e:
-    if (e.application_error ==
-        urlfetch_service_pb.URLFetchServiceError.INVALID_URL):
-      raise InvalidURLError(str(e))
-    if (e.application_error ==
-        urlfetch_service_pb.URLFetchServiceError.UNSPECIFIED_ERROR):
-      raise DownloadError(str(e))
-    if (e.application_error ==
-        urlfetch_service_pb.URLFetchServiceError.FETCH_ERROR):
-      raise DownloadError(str(e))
-    if (e.application_error ==
-        urlfetch_service_pb.URLFetchServiceError.RESPONSE_TOO_LARGE):
-      raise ResponseTooLargeError(None)
-    if (e.application_error ==
-        urlfetch_service_pb.URLFetchServiceError.DEADLINE_EXCEEDED):
-      raise DownloadError(str(e))
-    raise e
-  result = _URLFetchResult(response)
+    try:
+      self.__rpc.CheckSuccess()
+      if not self.__called_hooks:
+        self.__called_hooks = True
+        apiproxy_stub_map.apiproxy.GetPostCallHooks().Call(
+            'urlfetch', 'Fetch', self.__request, self.__response)
+    except apiproxy_errors.ApplicationError, e:
+      if (e.application_error ==
+          urlfetch_service_pb.URLFetchServiceError.INVALID_URL):
+        raise InvalidURLError(str(e))
+      if (e.application_error ==
+          urlfetch_service_pb.URLFetchServiceError.UNSPECIFIED_ERROR):
+        raise DownloadError(str(e))
+      if (e.application_error ==
+          urlfetch_service_pb.URLFetchServiceError.FETCH_ERROR):
+        raise DownloadError(str(e))
+      if (e.application_error ==
+          urlfetch_service_pb.URLFetchServiceError.RESPONSE_TOO_LARGE):
+        raise ResponseTooLargeError(None)
+      if (e.application_error ==
+          urlfetch_service_pb.URLFetchServiceError.DEADLINE_EXCEEDED):
+        raise DownloadError(str(e))
+      raise e
 
-  if not allow_truncated and response.contentwastruncated():
-    raise ResponseTooLargeError(result)
+    if self.__response.contentwastruncated() and not allow_truncated:
+      raise ResponseTooLargeError(_URLFetchResult(self.__response))
+
+  def get_result(self, allow_truncated=False):
+    """Returns the RPC result or raises an exception if the rpc failed.
+
+    This method waits for the RPC if not completed, and checks success.
+
+    Args:
+      allow_truncated: if False, an error is raised if the response was
+        truncated.
 
-  return result
+    Returns:
+      The urlfetch result.
+
+    Raises:
+      Error if the rpc has not yet finished.
+      InvalidURLError if the url was invalid.
+      DownloadError if there was a problem fetching the url.
+      ResponseTooLargeError if the response was either truncated (and
+        allow_truncated is false) or if it was too big for us to download.
+    """
+    if self.__result is None:
+      self.check_success(allow_truncated)
+      self.__result = _URLFetchResult(self.__response)
+    return self.__result
+
 
 Fetch = fetch
 
 
 class _URLFetchResult(object):
-  """A Pythonic representation of our fetch response protocol buffer."""
+  """A Pythonic representation of our fetch response protocol buffer.
+  """
+
   def __init__(self, response_proto):
+    """Constructor.
+
+    Args:
+      response_proto: the URLFetchResponse proto buffer to wrap.
+    """
     self.__pb = response_proto
     self.content = response_proto.content()
     self.status_code = response_proto.statuscode()
--- a/thirdparty/google_appengine/google/appengine/api/urlfetch_service_pb.py	Thu May 14 22:40:39 2009 +0200
+++ b/thirdparty/google_appengine/google/appengine/api/urlfetch_service_pb.py	Mon May 18 14:22:45 2009 +0200
@@ -217,6 +217,8 @@
   payload_ = ""
   has_followredirects_ = 0
   followredirects_ = 1
+  has_deadline_ = 0
+  deadline_ = 0.0
 
   def __init__(self, contents=None):
     self.header_ = []
@@ -290,6 +292,19 @@
 
   def has_followredirects(self): return self.has_followredirects_
 
+  def deadline(self): return self.deadline_
+
+  def set_deadline(self, x):
+    self.has_deadline_ = 1
+    self.deadline_ = x
+
+  def clear_deadline(self):
+    if self.has_deadline_:
+      self.has_deadline_ = 0
+      self.deadline_ = 0.0
+
+  def has_deadline(self): return self.has_deadline_
+
 
   def MergeFrom(self, x):
     assert x is not self
@@ -298,6 +313,7 @@
     for i in xrange(x.header_size()): self.add_header().CopyFrom(x.header(i))
     if (x.has_payload()): self.set_payload(x.payload())
     if (x.has_followredirects()): self.set_followredirects(x.followredirects())
+    if (x.has_deadline()): self.set_deadline(x.deadline())
 
   def Equals(self, x):
     if x is self: return 1
@@ -312,6 +328,8 @@
     if self.has_payload_ and self.payload_ != x.payload_: return 0
     if self.has_followredirects_ != x.has_followredirects_: return 0
     if self.has_followredirects_ and self.followredirects_ != x.followredirects_: return 0
+    if self.has_deadline_ != x.has_deadline_: return 0
+    if self.has_deadline_ and self.deadline_ != x.deadline_: return 0
     return 1
 
   def IsInitialized(self, debug_strs=None):
@@ -336,6 +354,7 @@
     for i in xrange(len(self.header_)): n += self.header_[i].ByteSize()
     if (self.has_payload_): n += 1 + self.lengthString(len(self.payload_))
     if (self.has_followredirects_): n += 2
+    if (self.has_deadline_): n += 9
     return n + 2
 
   def Clear(self):
@@ -344,6 +363,7 @@
     self.clear_header()
     self.clear_payload()
     self.clear_followredirects()
+    self.clear_deadline()
 
   def OutputUnchecked(self, out):
     out.putVarInt32(8)
@@ -360,6 +380,9 @@
     if (self.has_followredirects_):
       out.putVarInt32(56)
       out.putBoolean(self.followredirects_)
+    if (self.has_deadline_):
+      out.putVarInt32(65)
+      out.putDouble(self.deadline_)
 
   def TryMerge(self, d):
     while d.avail() > 0:
@@ -379,6 +402,9 @@
       if tt == 56:
         self.set_followredirects(d.getBoolean())
         continue
+      if tt == 65:
+        self.set_deadline(d.getDouble())
+        continue
       if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
       d.skipData(tt)
 
@@ -397,6 +423,7 @@
       cnt+=1
     if self.has_payload_: res+=prefix+("Payload: %s\n" % self.DebugFormatString(self.payload_))
     if self.has_followredirects_: res+=prefix+("FollowRedirects: %s\n" % self.DebugFormatBool(self.followredirects_))
+    if self.has_deadline_: res+=prefix+("Deadline: %s\n" % self.DebugFormat(self.deadline_))
     return res
 
   kMethod = 1
@@ -406,6 +433,7 @@
   kHeaderValue = 5
   kPayload = 6
   kFollowRedirects = 7
+  kDeadline = 8
 
   _TEXT = (
    "ErrorCode",
@@ -416,6 +444,7 @@
    "Value",
    "Payload",
    "FollowRedirects",
+   "Deadline",
   )
 
   _TYPES = (
@@ -434,6 +463,8 @@
 
    ProtocolBuffer.Encoder.NUMERIC,
 
+   ProtocolBuffer.Encoder.DOUBLE,
+
   )
 
   _STYLE = """"""
@@ -542,6 +573,10 @@
   statuscode_ = 0
   has_contentwastruncated_ = 0
   contentwastruncated_ = 0
+  has_externalbytessent_ = 0
+  externalbytessent_ = 0
+  has_externalbytesreceived_ = 0
+  externalbytesreceived_ = 0
 
   def __init__(self, contents=None):
     self.header_ = []
@@ -602,6 +637,32 @@
 
   def has_contentwastruncated(self): return self.has_contentwastruncated_
 
+  def externalbytessent(self): return self.externalbytessent_
+
+  def set_externalbytessent(self, x):
+    self.has_externalbytessent_ = 1
+    self.externalbytessent_ = x
+
+  def clear_externalbytessent(self):
+    if self.has_externalbytessent_:
+      self.has_externalbytessent_ = 0
+      self.externalbytessent_ = 0
+
+  def has_externalbytessent(self): return self.has_externalbytessent_
+
+  def externalbytesreceived(self): return self.externalbytesreceived_
+
+  def set_externalbytesreceived(self, x):
+    self.has_externalbytesreceived_ = 1
+    self.externalbytesreceived_ = x
+
+  def clear_externalbytesreceived(self):
+    if self.has_externalbytesreceived_:
+      self.has_externalbytesreceived_ = 0
+      self.externalbytesreceived_ = 0
+
+  def has_externalbytesreceived(self): return self.has_externalbytesreceived_
+
 
   def MergeFrom(self, x):
     assert x is not self
@@ -609,6 +670,8 @@
     if (x.has_statuscode()): self.set_statuscode(x.statuscode())
     for i in xrange(x.header_size()): self.add_header().CopyFrom(x.header(i))
     if (x.has_contentwastruncated()): self.set_contentwastruncated(x.contentwastruncated())
+    if (x.has_externalbytessent()): self.set_externalbytessent(x.externalbytessent())
+    if (x.has_externalbytesreceived()): self.set_externalbytesreceived(x.externalbytesreceived())
 
   def Equals(self, x):
     if x is self: return 1
@@ -621,6 +684,10 @@
       if e1 != e2: return 0
     if self.has_contentwastruncated_ != x.has_contentwastruncated_: return 0
     if self.has_contentwastruncated_ and self.contentwastruncated_ != x.contentwastruncated_: return 0
+    if self.has_externalbytessent_ != x.has_externalbytessent_: return 0
+    if self.has_externalbytessent_ and self.externalbytessent_ != x.externalbytessent_: return 0
+    if self.has_externalbytesreceived_ != x.has_externalbytesreceived_: return 0
+    if self.has_externalbytesreceived_ and self.externalbytesreceived_ != x.externalbytesreceived_: return 0
     return 1
 
   def IsInitialized(self, debug_strs=None):
@@ -640,6 +707,8 @@
     n += 2 * len(self.header_)
     for i in xrange(len(self.header_)): n += self.header_[i].ByteSize()
     if (self.has_contentwastruncated_): n += 2
+    if (self.has_externalbytessent_): n += 1 + self.lengthVarInt64(self.externalbytessent_)
+    if (self.has_externalbytesreceived_): n += 1 + self.lengthVarInt64(self.externalbytesreceived_)
     return n + 1
 
   def Clear(self):
@@ -647,6 +716,8 @@
     self.clear_statuscode()
     self.clear_header()
     self.clear_contentwastruncated()
+    self.clear_externalbytessent()
+    self.clear_externalbytesreceived()
 
   def OutputUnchecked(self, out):
     if (self.has_content_):
@@ -661,6 +732,12 @@
     if (self.has_contentwastruncated_):
       out.putVarInt32(48)
       out.putBoolean(self.contentwastruncated_)
+    if (self.has_externalbytessent_):
+      out.putVarInt32(56)
+      out.putVarInt64(self.externalbytessent_)
+    if (self.has_externalbytesreceived_):
+      out.putVarInt32(64)
+      out.putVarInt64(self.externalbytesreceived_)
 
   def TryMerge(self, d):
     while d.avail() > 0:
@@ -677,6 +754,12 @@
       if tt == 48:
         self.set_contentwastruncated(d.getBoolean())
         continue
+      if tt == 56:
+        self.set_externalbytessent(d.getVarInt64())
+        continue
+      if tt == 64:
+        self.set_externalbytesreceived(d.getVarInt64())
+        continue
       if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
       d.skipData(tt)
 
@@ -694,6 +777,8 @@
       res+=prefix+"}\n"
       cnt+=1
     if self.has_contentwastruncated_: res+=prefix+("ContentWasTruncated: %s\n" % self.DebugFormatBool(self.contentwastruncated_))
+    if self.has_externalbytessent_: res+=prefix+("ExternalBytesSent: %s\n" % self.DebugFormatInt64(self.externalbytessent_))
+    if self.has_externalbytesreceived_: res+=prefix+("ExternalBytesReceived: %s\n" % self.DebugFormatInt64(self.externalbytesreceived_))
     return res
 
   kContent = 1
@@ -702,6 +787,8 @@
   kHeaderKey = 4
   kHeaderValue = 5
   kContentWasTruncated = 6
+  kExternalBytesSent = 7
+  kExternalBytesReceived = 8
 
   _TEXT = (
    "ErrorCode",
@@ -711,6 +798,8 @@
    "Key",
    "Value",
    "ContentWasTruncated",
+   "ExternalBytesSent",
+   "ExternalBytesReceived",
   )
 
   _TYPES = (
@@ -727,6 +816,10 @@
 
    ProtocolBuffer.Encoder.NUMERIC,
 
+   ProtocolBuffer.Encoder.NUMERIC,
+
+   ProtocolBuffer.Encoder.NUMERIC,
+
   )
 
   _STYLE = """"""
--- a/thirdparty/google_appengine/google/appengine/api/urlfetch_stub.py	Thu May 14 22:40:39 2009 +0200
+++ b/thirdparty/google_appengine/google/appengine/api/urlfetch_stub.py	Mon May 18 14:22:45 2009 +0200
@@ -51,7 +51,6 @@
 
 
 _UNTRUSTED_REQUEST_HEADERS = frozenset([
-  'accept-encoding',
   'content-length',
   'host',
   'referer',
@@ -112,13 +111,17 @@
                                                   request.header_list())
     request.clear_header()
     request.header_list().extend(sanitized_headers)
+    deadline = _API_CALL_DEADLINE
+    if request.has_deadline():
+      deadline = request.deadline()
 
     self._RetrieveURL(request.url(), payload, method,
                       request.header_list(), response,
-                      follow_redirects=request.followredirects())
+                      follow_redirects=request.followredirects(),
+                      deadline=deadline)
 
   def _RetrieveURL(self, url, payload, method, headers, response,
-                   follow_redirects=True):
+                   follow_redirects=True, deadline=_API_CALL_DEADLINE):
     """Retrieves a URL.
 
     Args:
@@ -129,6 +132,7 @@
       response: Response object
       follow_redirects: optional setting (defaulting to True) for whether or not
         we should transparently follow redirects (up to MAX_REDIRECTS)
+      deadline: Number of seconds to wait for the urlfetch to finish.
 
     Raises:
       Raises an apiproxy_errors.ApplicationError exception with FETCH_ERROR
@@ -195,7 +199,7 @@
 
         orig_timeout = socket.getdefaulttimeout()
         try:
-          socket.setdefaulttimeout(_API_CALL_DEADLINE)
+          socket.setdefaulttimeout(deadline)
           connection.request(method, full_path, payload, adjusted_headers)
           http_response = connection.getresponse()
           http_response_data = http_response.read()
@@ -238,4 +242,9 @@
       untrusted_headers: set of untrusted headers names
       headers: list of string pairs, first is header name and the second is header's value
     """
+    prohibited_headers = [h.key() for h in headers
+                          if h.key().lower() in untrusted_headers]
+    if prohibited_headers:
+      logging.warn("Stripped prohibited headers from URLFetch request: %s",
+                   prohibited_headers)
     return (h for h in headers if h.key().lower() not in untrusted_headers)
--- a/thirdparty/google_appengine/google/appengine/api/user_service_pb.py	Thu May 14 22:40:39 2009 +0200
+++ b/thirdparty/google_appengine/google/appengine/api/user_service_pb.py	Mon May 18 14:22:45 2009 +0200
@@ -22,7 +22,7 @@
 __pychecker__ = """maxreturns=0 maxbranches=0 no-callinit
                    unusednames=printElemNumber,debug_strs no-special"""
 
-from google.appengine.api.api_base_pb import StringProto
+from google.appengine.api.api_base_pb import *
 class UserServiceError(ProtocolBuffer.ProtocolMessage):
 
   OK           =    0
--- a/thirdparty/google_appengine/google/appengine/cron/GrocLexer.py	Thu May 14 22:40:39 2009 +0200
+++ b/thirdparty/google_appengine/google/appengine/cron/GrocLexer.py	Mon May 18 14:22:45 2009 +0200
@@ -23,39 +23,40 @@
 HIDDEN = BaseRecognizer.HIDDEN
 
 THIRD=12
-SEPTEMBER=34
+SEPTEMBER=35
 FOURTH=13
 SECOND=11
-WEDNESDAY=20
-NOVEMBER=36
-SATURDAY=23
-JULY=32
-APRIL=29
+WEDNESDAY=21
+NOVEMBER=37
+SATURDAY=24
+JULY=33
+APRIL=30
 DIGITS=8
-OCTOBER=35
-MAY=30
+OCTOBER=36
+MAY=31
 EVERY=6
-FEBRUARY=27
-MONDAY=18
-SUNDAY=24
-JUNE=31
+FEBRUARY=28
+MONDAY=19
+SUNDAY=25
+DAY=18
+JUNE=32
 OF=4
-MARCH=28
+MARCH=29
 EOF=-1
-JANUARY=26
-MONTH=25
-FRIDAY=22
+JANUARY=27
+MONTH=26
+FRIDAY=23
 MINUTES=17
 FIFTH=14
 TIME=5
-WS=39
-QUARTER=38
-THURSDAY=21
+WS=40
+QUARTER=39
+THURSDAY=22
 COMMA=9
-DECEMBER=37
-AUGUST=33
+DECEMBER=38
+AUGUST=34
 DIGIT=7
-TUESDAY=19
+TUESDAY=20
 HOURS=16
 FOURTH_OR_FIFTH=15
 FIRST=10
@@ -100,10 +101,10 @@
             if LA1 == 48:
                 LA1_1 = self.input.LA(2)
 
-                if ((48 <= LA1_1 <= 57)) :
+                if (LA1_1 == 58) :
+                    alt1 = 1
+                elif ((48 <= LA1_1 <= 57)) :
                     alt1 = 2
-                elif (LA1_1 == 58) :
-                    alt1 = 1
                 else:
                     nvae = NoViableAltException("", 1, 1, self.input)
 
@@ -112,10 +113,10 @@
             elif LA1 == 49:
                 LA1_2 = self.input.LA(2)
 
-                if ((48 <= LA1_2 <= 57)) :
+                if (LA1_2 == 58) :
+                    alt1 = 1
+                elif ((48 <= LA1_2 <= 57)) :
                     alt1 = 3
-                elif (LA1_2 == 58) :
-                    alt1 = 1
                 else:
                     nvae = NoViableAltException("", 1, 2, self.input)
 
@@ -124,7 +125,7 @@
             elif LA1 == 50:
                 LA1_3 = self.input.LA(2)
 
-                if ((48 <= LA1_3 <= 52)) :
+                if ((48 <= LA1_3 <= 51)) :
                     alt1 = 4
                 elif (LA1_3 == 58) :
                     alt1 = 1
@@ -169,7 +170,7 @@
                 pass
                 pass
                 self.match(50)
-                self.matchRange(48, 52)
+                self.matchRange(48, 51)
 
 
 
@@ -436,6 +437,27 @@
 
 
 
+    def mDAY(self, ):
+
+        try:
+            _type = DAY
+            _channel = DEFAULT_CHANNEL
+
+            pass
+            self.match("day")
+
+
+
+            self._state.type = _type
+            self._state.channel = _channel
+
+        finally:
+
+            pass
+
+
+
+
     def mMONDAY(self, ):
 
         try:
@@ -1330,7 +1352,7 @@
 
 
     def mTokens(self):
-        alt25 = 36
+        alt25 = 37
         alt25 = self.dfa25.predict(self.input)
         if alt25 == 1:
             pass
@@ -1369,146 +1391,151 @@
 
         elif alt25 == 8:
             pass
-            self.mMONDAY()
+            self.mDAY()
 
 
         elif alt25 == 9:
             pass
-            self.mTUESDAY()
+            self.mMONDAY()
 
 
         elif alt25 == 10:
             pass
-            self.mWEDNESDAY()
+            self.mTUESDAY()
 
 
         elif alt25 == 11:
             pass
-            self.mTHURSDAY()
+            self.mWEDNESDAY()
 
 
         elif alt25 == 12:
             pass
-            self.mFRIDAY()
+            self.mTHURSDAY()
 
 
         elif alt25 == 13:
             pass
-            self.mSATURDAY()
+            self.mFRIDAY()
 
 
         elif alt25 == 14:
             pass
-            self.mSUNDAY()
+            self.mSATURDAY()
 
 
         elif alt25 == 15:
             pass
-            self.mJANUARY()
+            self.mSUNDAY()
 
 
         elif alt25 == 16:
             pass
-            self.mFEBRUARY()
+            self.mJANUARY()
 
 
         elif alt25 == 17:
             pass
-            self.mMARCH()
+            self.mFEBRUARY()
 
 
         elif alt25 == 18:
             pass
-            self.mAPRIL()
+            self.mMARCH()
 
 
         elif alt25 == 19:
             pass
-            self.mMAY()
+            self.mAPRIL()
 
 
         elif alt25 == 20:
             pass
-            self.mJUNE()
+            self.mMAY()
 
 
         elif alt25 == 21:
             pass
-            self.mJULY()
+            self.mJUNE()
 
 
         elif alt25 == 22:
             pass
-            self.mAUGUST()
+            self.mJULY()
 
 
         elif alt25 == 23:
             pass
-            self.mSEPTEMBER()
+            self.mAUGUST()
 
 
         elif alt25 == 24:
             pass
-            self.mOCTOBER()
+            self.mSEPTEMBER()
 
 
         elif alt25 == 25:
             pass
-            self.mNOVEMBER()
+            self.mOCTOBER()
 
 
         elif alt25 == 26:
             pass
-            self.mDECEMBER()
+            self.mNOVEMBER()
 
 
         elif alt25 == 27:
             pass
-            self.mMONTH()
+            self.mDECEMBER()
 
 
         elif alt25 == 28:
             pass
-            self.mQUARTER()
+            self.mMONTH()
 
 
         elif alt25 == 29:
             pass
-            self.mEVERY()
+            self.mQUARTER()
 
 
         elif alt25 == 30:
             pass
-            self.mHOURS()
+            self.mEVERY()
 
 
         elif alt25 == 31:
             pass
-            self.mMINUTES()
+            self.mHOURS()
 
 
         elif alt25 == 32:
             pass
-            self.mCOMMA()
+            self.mMINUTES()
 
 
         elif alt25 == 33:
             pass
-            self.mOF()
+            self.mCOMMA()
 
 
         elif alt25 == 34:
             pass
-            self.mWS()
+            self.mOF()
 
 
         elif alt25 == 35:
             pass
-            self.mDIGIT()
+            self.mWS()
 
 
         elif alt25 == 36:
             pass
+            self.mDIGIT()
+
+
+        elif alt25 == 37:
+            pass
             self.mDIGITS()
 
 
@@ -1519,73 +1546,74 @@
 
 
     DFA25_eot = DFA.unpack(
-        u"\1\uffff\4\27\2\uffff\1\27\1\uffff\2\27\16\uffff\1\36\1\uffff\2"
-        u"\36\31\uffff\1\74\6\uffff"
+        u"\1\uffff\4\30\2\uffff\1\30\1\uffff\2\30\14\uffff\1\36\3\uffff\2"
+        u"\36\33\uffff\1\76\6\uffff"
         )
 
     DFA25_eof = DFA.unpack(
-        u"\75\uffff"
+        u"\77\uffff"
         )
 
     DFA25_min = DFA.unpack(
-        u"\1\11\4\60\1\145\1\141\1\60\1\150\2\60\1\141\1\uffff\1\141\1\160"
-        u"\1\143\11\uffff\1\72\1\uffff\2\72\3\uffff\1\146\3\uffff\1\143\3"
-        u"\uffff\1\151\2\uffff\1\156\1\162\2\uffff\1\154\6\uffff\1\164\6"
+        u"\1\11\4\60\1\145\1\141\1\60\1\150\2\60\2\141\1\uffff\1\141\1\160"
+        u"\1\143\6\uffff\1\72\3\uffff\2\72\3\uffff\1\146\3\uffff\1\143\3"
+        u"\uffff\1\151\4\uffff\1\156\1\162\2\uffff\1\154\6\uffff\1\164\6"
         u"\uffff"
         )
 
     DFA25_max = DFA.unpack(
-        u"\1\167\1\72\1\163\1\156\2\162\1\165\1\164\1\165\1\164\1\72\1\157"
-        u"\1\uffff\2\165\1\146\11\uffff\1\72\1\uffff\2\72\3\uffff\1\162\3"
-        u"\uffff\1\160\3\uffff\1\165\2\uffff\1\156\1\171\2\uffff\1\156\6"
-        u"\uffff\1\164\6\uffff"
+        u"\1\167\1\72\1\163\1\156\2\162\1\165\1\164\1\165\1\164\1\72\1\145"
+        u"\1\157\1\uffff\2\165\1\146\6\uffff\1\72\3\uffff\2\72\3\uffff\1"
+        u"\162\3\uffff\1\160\3\uffff\1\165\4\uffff\1\156\1\171\2\uffff\1"
+        u"\156\6\uffff\1\164\6\uffff"
         )
 
     DFA25_accept = DFA.unpack(
-        u"\14\uffff\1\12\3\uffff\1\31\1\32\1\34\1\35\1\36\1\40\1\42\1\43"
-        u"\1\1\1\uffff\1\2\2\uffff\1\3\1\44\1\4\1\uffff\1\7\1\14\1\20\1\uffff"
-        u"\1\15\1\16\1\5\1\uffff\1\11\1\6\2\uffff\1\37\1\17\1\uffff\1\22"
-        u"\1\26\1\30\1\41\1\27\1\13\1\uffff\1\21\1\23\1\24\1\25\1\33\1\10"
+        u"\15\uffff\1\13\3\uffff\1\32\1\35\1\36\1\37\1\41\1\43\1\uffff\1"
+        u"\44\1\1\1\2\2\uffff\1\3\1\45\1\4\1\uffff\1\7\1\15\1\21\1\uffff"
+        u"\1\16\1\17\1\5\1\uffff\1\12\1\6\1\10\1\33\2\uffff\1\40\1\20\1\uffff"
+        u"\1\23\1\27\1\31\1\42\1\30\1\14\1\uffff\1\22\1\24\1\25\1\26\1\34"
+        u"\1\11"
         )
 
     DFA25_special = DFA.unpack(
-        u"\75\uffff"
+        u"\77\uffff"
         )
 
 
     DFA25_transition = [
         DFA.unpack(u"\2\26\2\uffff\1\26\22\uffff\1\26\13\uffff\1\25\3\uffff"
-        u"\1\1\1\2\1\3\1\4\1\7\1\11\4\12\47\uffff\1\16\2\uffff\1\21\1\23"
-        u"\1\5\1\uffff\1\24\1\uffff\1\15\2\uffff\1\13\1\20\1\17\1\uffff\1"
-        u"\22\1\uffff\1\6\1\10\2\uffff\1\14"),
-        DFA.unpack(u"\12\31\1\30"),
-        DFA.unpack(u"\12\33\1\30\70\uffff\1\32"),
-        DFA.unpack(u"\5\34\5\36\1\30\63\uffff\1\35"),
-        DFA.unpack(u"\12\36\1\30\67\uffff\1\37"),
+        u"\1\1\1\2\1\3\1\4\1\7\1\11\4\12\47\uffff\1\17\2\uffff\1\13\1\23"
+        u"\1\5\1\uffff\1\24\1\uffff\1\16\2\uffff\1\14\1\21\1\20\1\uffff\1"
+        u"\22\1\uffff\1\6\1\10\2\uffff\1\15"),
+        DFA.unpack(u"\12\27\1\31"),
+        DFA.unpack(u"\12\33\1\31\70\uffff\1\32"),
+        DFA.unpack(u"\4\34\6\36\1\31\63\uffff\1\35"),
+        DFA.unpack(u"\12\36\1\31\67\uffff\1\37"),
         DFA.unpack(u"\1\43\3\uffff\1\40\5\uffff\1\41\2\uffff\1\42"),
         DFA.unpack(u"\1\45\3\uffff\1\44\17\uffff\1\46"),
-        DFA.unpack(u"\12\36\1\30\71\uffff\1\47"),
+        DFA.unpack(u"\12\36\1\31\71\uffff\1\47"),
         DFA.unpack(u"\1\50\14\uffff\1\51"),
-        DFA.unpack(u"\12\36\1\30\71\uffff\1\52"),
-        DFA.unpack(u"\12\36\1\30"),
-        DFA.unpack(u"\1\54\7\uffff\1\55\5\uffff\1\53"),
+        DFA.unpack(u"\12\36\1\31\71\uffff\1\52"),
+        DFA.unpack(u"\12\36\1\31"),
+        DFA.unpack(u"\1\53\3\uffff\1\54"),
+        DFA.unpack(u"\1\56\7\uffff\1\57\5\uffff\1\55"),
         DFA.unpack(u""),
-        DFA.unpack(u"\1\56\23\uffff\1\57"),
-        DFA.unpack(u"\1\60\4\uffff\1\61"),
-        DFA.unpack(u"\1\62\2\uffff\1\63"),
+        DFA.unpack(u"\1\60\23\uffff\1\61"),
+        DFA.unpack(u"\1\62\4\uffff\1\63"),
+        DFA.unpack(u"\1\64\2\uffff\1\65"),
         DFA.unpack(u""),
         DFA.unpack(u""),
         DFA.unpack(u""),
         DFA.unpack(u""),
         DFA.unpack(u""),
         DFA.unpack(u""),
+        DFA.unpack(u"\1\31"),
         DFA.unpack(u""),
         DFA.unpack(u""),
         DFA.unpack(u""),
-        DFA.unpack(u"\1\30"),
-        DFA.unpack(u""),
-        DFA.unpack(u"\1\30"),
-        DFA.unpack(u"\1\30"),
+        DFA.unpack(u"\1\31"),
+        DFA.unpack(u"\1\31"),
         DFA.unpack(u""),
         DFA.unpack(u""),
         DFA.unpack(u""),
@@ -1593,25 +1621,27 @@
         DFA.unpack(u""),
         DFA.unpack(u""),
         DFA.unpack(u""),
-        DFA.unpack(u"\1\35\14\uffff\1\64"),
+        DFA.unpack(u"\1\35\14\uffff\1\66"),
         DFA.unpack(u""),
         DFA.unpack(u""),
         DFA.unpack(u""),
-        DFA.unpack(u"\1\37\13\uffff\1\65"),
+        DFA.unpack(u"\1\37\13\uffff\1\67"),
+        DFA.unpack(u""),
+        DFA.unpack(u""),
         DFA.unpack(u""),
         DFA.unpack(u""),
-        DFA.unpack(u"\1\66"),
-        DFA.unpack(u"\1\67\6\uffff\1\70"),
+        DFA.unpack(u"\1\70"),
+        DFA.unpack(u"\1\71\6\uffff\1\72"),
         DFA.unpack(u""),
         DFA.unpack(u""),
-        DFA.unpack(u"\1\72\1\uffff\1\71"),
+        DFA.unpack(u"\1\74\1\uffff\1\73"),
         DFA.unpack(u""),
         DFA.unpack(u""),
         DFA.unpack(u""),
         DFA.unpack(u""),
         DFA.unpack(u""),
         DFA.unpack(u""),
-        DFA.unpack(u"\1\73"),
+        DFA.unpack(u"\1\75"),
         DFA.unpack(u""),
         DFA.unpack(u""),
         DFA.unpack(u""),
--- a/thirdparty/google_appengine/google/appengine/cron/GrocParser.py	Thu May 14 22:40:39 2009 +0200
+++ b/thirdparty/google_appengine/google/appengine/cron/GrocParser.py	Mon May 18 14:22:45 2009 +0200
@@ -32,39 +32,40 @@
 HIDDEN = BaseRecognizer.HIDDEN
 
 THIRD=12
-SEPTEMBER=34
+SEPTEMBER=35
 FOURTH=13
 SECOND=11
-WEDNESDAY=20
-NOVEMBER=36
-SATURDAY=23
-JULY=32
-APRIL=29
+WEDNESDAY=21
+NOVEMBER=37
+SATURDAY=24
+JULY=33
+APRIL=30
 DIGITS=8
-OCTOBER=35
-MAY=30
+OCTOBER=36
+MAY=31
 EVERY=6
-FEBRUARY=27
-MONDAY=18
-SUNDAY=24
-JUNE=31
-MARCH=28
+FEBRUARY=28
+MONDAY=19
+SUNDAY=25
+JUNE=32
+DAY=18
+MARCH=29
 OF=4
 EOF=-1
-JANUARY=26
-MONTH=25
-FRIDAY=22
+JANUARY=27
+MONTH=26
+FRIDAY=23
 FIFTH=14
 MINUTES=17
 TIME=5
-WS=39
-QUARTER=38
-THURSDAY=21
+WS=40
+QUARTER=39
+THURSDAY=22
 COMMA=9
-DECEMBER=37
-AUGUST=33
+DECEMBER=38
+AUGUST=34
 DIGIT=7
-TUESDAY=19
+TUESDAY=20
 HOURS=16
 FIRST=10
 FOURTH_OR_FIFTH=15
@@ -72,10 +73,10 @@
 tokenNames = [
     "<invalid>", "<EOR>", "<DOWN>", "<UP>",
     "OF", "TIME", "EVERY", "DIGIT", "DIGITS", "COMMA", "FIRST", "SECOND",
-    "THIRD", "FOURTH", "FIFTH", "FOURTH_OR_FIFTH", "HOURS", "MINUTES", "MONDAY",
-    "TUESDAY", "WEDNESDAY", "THURSDAY", "FRIDAY", "SATURDAY", "SUNDAY",
-    "MONTH", "JANUARY", "FEBRUARY", "MARCH", "APRIL", "MAY", "JUNE", "JULY",
-    "AUGUST", "SEPTEMBER", "OCTOBER", "NOVEMBER", "DECEMBER", "QUARTER",
+    "THIRD", "FOURTH", "FIFTH", "FOURTH_OR_FIFTH", "HOURS", "MINUTES", "DAY",
+    "MONDAY", "TUESDAY", "WEDNESDAY", "THURSDAY", "FRIDAY", "SATURDAY",
+    "SUNDAY", "MONTH", "JANUARY", "FEBRUARY", "MARCH", "APRIL", "MAY", "JUNE",
+    "JULY", "AUGUST", "SEPTEMBER", "OCTOBER", "NOVEMBER", "DECEMBER", "QUARTER",
     "WS"
 ]
 
@@ -95,6 +96,17 @@
         Parser.__init__(self, input, state)
 
 
+        self.dfa3 = self.DFA3(
+            self, 3,
+            eot = self.DFA3_eot,
+            eof = self.DFA3_eof,
+            min = self.DFA3_min,
+            max = self.DFA3_max,
+            accept = self.DFA3_accept,
+            special = self.DFA3_special,
+            transition = self.DFA3_transition
+            )
+
 
 
 
@@ -160,7 +172,7 @@
 
                     if ((DIGIT <= LA1_1 <= DIGITS)) :
                         alt1 = 2
-                    elif ((MONDAY <= LA1_1 <= SUNDAY)) :
+                    elif ((DAY <= LA1_1 <= SUNDAY)) :
                         alt1 = 1
                     else:
                         nvae = NoViableAltException("", 1, 1, self.input)
@@ -214,57 +226,77 @@
             try:
                 pass
                 pass
-                pass
-                pass
-                pass
-                self._state.following.append(self.FOLLOW_ordinals_in_specifictime69)
-                self.ordinals()
+                alt3 = 2
+                alt3 = self.dfa3.predict(self.input)
+                if alt3 == 1:
+                    pass
+                    pass
+                    pass
+                    self._state.following.append(self.FOLLOW_ordinals_in_specifictime69)
+                    self.ordinals()
+
+                    self._state.following.pop()
+                    self._state.following.append(self.FOLLOW_weekdays_in_specifictime71)
+                    self.weekdays()
+
+                    self._state.following.pop()
+
+
+
+                    self.match(self.input, OF, self.FOLLOW_OF_in_specifictime74)
+                    alt2 = 2
+                    LA2_0 = self.input.LA(1)
 
-                self._state.following.pop()
-                self._state.following.append(self.FOLLOW_weekdays_in_specifictime71)
-                self.weekdays()
+                    if ((MONTH <= LA2_0 <= DECEMBER)) :
+                        alt2 = 1
+                    elif ((FIRST <= LA2_0 <= THIRD) or LA2_0 == QUARTER) :
+                        alt2 = 2
+                    else:
+                        nvae = NoViableAltException("", 2, 0, self.input)
+
+                        raise nvae
 
-                self._state.following.pop()
+                    if alt2 == 1:
+                        pass
+                        self._state.following.append(self.FOLLOW_monthspec_in_specifictime77)
+                        self.monthspec()
+
+                        self._state.following.pop()
+
+
+                    elif alt2 == 2:
+                        pass
+                        self._state.following.append(self.FOLLOW_quarterspec_in_specifictime79)
+                        self.quarterspec()
+
+                        self._state.following.pop()
 
 
 
 
 
 
-                self.match(self.input, OF, self.FOLLOW_OF_in_specifictime75)
-                alt2 = 2
-                LA2_0 = self.input.LA(1)
+
 
-                if ((MONTH <= LA2_0 <= DECEMBER)) :
-                    alt2 = 1
-                elif ((FIRST <= LA2_0 <= THIRD) or LA2_0 == QUARTER) :
-                    alt2 = 2
-                else:
-                    nvae = NoViableAltException("", 2, 0, self.input)
-
-                    raise nvae
-
-                if alt2 == 1:
+                elif alt3 == 2:
+                    pass
                     pass
-                    self._state.following.append(self.FOLLOW_monthspec_in_specifictime78)
-                    self.monthspec()
+                    self._state.following.append(self.FOLLOW_ordinals_in_specifictime96)
+                    self.ordinals()
 
                     self._state.following.pop()
-
-
-                elif alt2 == 2:
-                    pass
-                    self._state.following.append(self.FOLLOW_quarterspec_in_specifictime80)
-                    self.quarterspec()
+                    self._state.following.append(self.FOLLOW_weekdays_in_specifictime98)
+                    self.weekdays()
 
                     self._state.following.pop()
+                    self.month_set = set(range(1,13))
 
 
 
 
 
 
-                TIME1=self.match(self.input, TIME, self.FOLLOW_TIME_in_specifictime93)
+                TIME1=self.match(self.input, TIME, self.FOLLOW_TIME_in_specifictime112)
                 self.time_string = TIME1.text
 
 
@@ -294,7 +326,7 @@
             try:
                 pass
                 pass
-                self.match(self.input, EVERY, self.FOLLOW_EVERY_in_interval112)
+                self.match(self.input, EVERY, self.FOLLOW_EVERY_in_interval131)
                 intervalnum = self.input.LT(1)
                 if (DIGIT <= self.input.LA(1) <= DIGITS):
                     self.input.consume()
@@ -308,7 +340,7 @@
 
                 self.interval_mins = int(intervalnum.text)
 
-                self._state.following.append(self.FOLLOW_period_in_interval138)
+                self._state.following.append(self.FOLLOW_period_in_interval157)
                 period2 = self.period()
 
                 self._state.following.pop()
@@ -341,43 +373,43 @@
         try:
             try:
                 pass
-                alt4 = 2
-                LA4_0 = self.input.LA(1)
+                alt5 = 2
+                LA5_0 = self.input.LA(1)
 
-                if (LA4_0 == EVERY) :
-                    alt4 = 1
-                elif ((FIRST <= LA4_0 <= FOURTH_OR_FIFTH)) :
-                    alt4 = 2
+                if (LA5_0 == EVERY) :
+                    alt5 = 1
+                elif ((FIRST <= LA5_0 <= FOURTH_OR_FIFTH)) :
+                    alt5 = 2
                 else:
-                    nvae = NoViableAltException("", 4, 0, self.input)
+                    nvae = NoViableAltException("", 5, 0, self.input)
 
                     raise nvae
 
-                if alt4 == 1:
+                if alt5 == 1:
                     pass
-                    self.match(self.input, EVERY, self.FOLLOW_EVERY_in_ordinals157)
+                    self.match(self.input, EVERY, self.FOLLOW_EVERY_in_ordinals176)
                     self.ordinal_set = self.ordinal_set.union(allOrdinals)
 
 
-                elif alt4 == 2:
+                elif alt5 == 2:
                     pass
                     pass
-                    self._state.following.append(self.FOLLOW_ordinal_in_ordinals173)
+                    self._state.following.append(self.FOLLOW_ordinal_in_ordinals192)
                     self.ordinal()
 
                     self._state.following.pop()
                     while True:
-                        alt3 = 2
-                        LA3_0 = self.input.LA(1)
+                        alt4 = 2
+                        LA4_0 = self.input.LA(1)
 
-                        if (LA3_0 == COMMA) :
-                            alt3 = 1
+                        if (LA4_0 == COMMA) :
+                            alt4 = 1
 
 
-                        if alt3 == 1:
+                        if alt4 == 1:
                             pass
-                            self.match(self.input, COMMA, self.FOLLOW_COMMA_in_ordinals176)
-                            self._state.following.append(self.FOLLOW_ordinal_in_ordinals178)
+                            self.match(self.input, COMMA, self.FOLLOW_COMMA_in_ordinals195)
+                            self._state.following.append(self.FOLLOW_ordinal_in_ordinals197)
                             self.ordinal()
 
                             self._state.following.pop()
@@ -489,30 +521,58 @@
         try:
             try:
                 pass
-                pass
-                self._state.following.append(self.FOLLOW_weekday_in_weekdays261)
-                self.weekday()
+                alt7 = 2
+                LA7_0 = self.input.LA(1)
+
+                if (LA7_0 == DAY) :
+                    alt7 = 1
+                elif ((MONDAY <= LA7_0 <= SUNDAY)) :
+                    alt7 = 2
+                else:
+                    nvae = NoViableAltException("", 7, 0, self.input)
 
-                self._state.following.pop()
-                while True:
-                    alt5 = 2
-                    LA5_0 = self.input.LA(1)
+                    raise nvae
+
+                if alt7 == 1:
+                    pass
+                    self.match(self.input, DAY, self.FOLLOW_DAY_in_weekdays280)
 
-                    if (LA5_0 == COMMA) :
-                        alt5 = 1
+                    self.weekday_set = set([self.ValueOf(SUNDAY), self.ValueOf(MONDAY),
+                            self.ValueOf(TUESDAY), self.ValueOf(WEDNESDAY),
+                            self.ValueOf(THURSDAY), self.ValueOf(FRIDAY),
+                            self.ValueOf(SATURDAY), self.ValueOf(SUNDAY)])
+
 
 
-                    if alt5 == 1:
-                        pass
-                        self.match(self.input, COMMA, self.FOLLOW_COMMA_in_weekdays264)
-                        self._state.following.append(self.FOLLOW_weekday_in_weekdays266)
-                        self.weekday()
+                elif alt7 == 2:
+                    pass
+                    pass
+                    self._state.following.append(self.FOLLOW_weekday_in_weekdays288)
+                    self.weekday()
 
-                        self._state.following.pop()
+                    self._state.following.pop()
+                    while True:
+                        alt6 = 2
+                        LA6_0 = self.input.LA(1)
+
+                        if (LA6_0 == COMMA) :
+                            alt6 = 1
 
 
-                    else:
-                        break
+                        if alt6 == 1:
+                            pass
+                            self.match(self.input, COMMA, self.FOLLOW_COMMA_in_weekdays291)
+                            self._state.following.append(self.FOLLOW_weekday_in_weekdays293)
+                            self.weekday()
+
+                            self._state.following.pop()
+
+
+                        else:
+                            break
+
+
+
 
 
 
@@ -573,21 +633,21 @@
         try:
             try:
                 pass
-                alt6 = 2
-                LA6_0 = self.input.LA(1)
+                alt8 = 2
+                LA8_0 = self.input.LA(1)
 
-                if (LA6_0 == MONTH) :
-                    alt6 = 1
-                elif ((JANUARY <= LA6_0 <= DECEMBER)) :
-                    alt6 = 2
+                if (LA8_0 == MONTH) :
+                    alt8 = 1
+                elif ((JANUARY <= LA8_0 <= DECEMBER)) :
+                    alt8 = 2
                 else:
-                    nvae = NoViableAltException("", 6, 0, self.input)
+                    nvae = NoViableAltException("", 8, 0, self.input)
 
                     raise nvae
 
-                if alt6 == 1:
+                if alt8 == 1:
                     pass
-                    self.match(self.input, MONTH, self.FOLLOW_MONTH_in_monthspec344)
+                    self.match(self.input, MONTH, self.FOLLOW_MONTH_in_monthspec373)
 
                     self.month_set = self.month_set.union(set([
                         self.ValueOf(JANUARY), self.ValueOf(FEBRUARY), self.ValueOf(MARCH),
@@ -598,9 +658,9 @@
 
 
 
-                elif alt6 == 2:
+                elif alt8 == 2:
                     pass
-                    self._state.following.append(self.FOLLOW_months_in_monthspec354)
+                    self._state.following.append(self.FOLLOW_months_in_monthspec383)
                     self.months()
 
                     self._state.following.pop()
@@ -628,22 +688,22 @@
             try:
                 pass
                 pass
-                self._state.following.append(self.FOLLOW_month_in_months371)
+                self._state.following.append(self.FOLLOW_month_in_months400)
                 self.month()
 
                 self._state.following.pop()
                 while True:
-                    alt7 = 2
-                    LA7_0 = self.input.LA(1)
+                    alt9 = 2
+                    LA9_0 = self.input.LA(1)
 
-                    if (LA7_0 == COMMA) :
-                        alt7 = 1
+                    if (LA9_0 == COMMA) :
+                        alt9 = 1
 
 
-                    if alt7 == 1:
+                    if alt9 == 1:
                         pass
-                        self.match(self.input, COMMA, self.FOLLOW_COMMA_in_months374)
-                        self._state.following.append(self.FOLLOW_month_in_months376)
+                        self.match(self.input, COMMA, self.FOLLOW_COMMA_in_months403)
+                        self._state.following.append(self.FOLLOW_month_in_months405)
                         self.month()
 
                         self._state.following.pop()
@@ -709,37 +769,37 @@
         try:
             try:
                 pass
-                alt8 = 2
-                LA8_0 = self.input.LA(1)
+                alt10 = 2
+                LA10_0 = self.input.LA(1)
 
-                if (LA8_0 == QUARTER) :
-                    alt8 = 1
-                elif ((FIRST <= LA8_0 <= THIRD)) :
-                    alt8 = 2
+                if (LA10_0 == QUARTER) :
+                    alt10 = 1
+                elif ((FIRST <= LA10_0 <= THIRD)) :
+                    alt10 = 2
                 else:
-                    nvae = NoViableAltException("", 8, 0, self.input)
+                    nvae = NoViableAltException("", 10, 0, self.input)
 
                     raise nvae
 
-                if alt8 == 1:
+                if alt10 == 1:
                     pass
-                    self.match(self.input, QUARTER, self.FOLLOW_QUARTER_in_quarterspec468)
+                    self.match(self.input, QUARTER, self.FOLLOW_QUARTER_in_quarterspec497)
 
                     self.month_set = self.month_set.union(set([
                         self.ValueOf(JANUARY), self.ValueOf(APRIL), self.ValueOf(JULY),
                         self.ValueOf(OCTOBER)]))
 
 
-                elif alt8 == 2:
+                elif alt10 == 2:
                     pass
                     pass
-                    self._state.following.append(self.FOLLOW_quarter_ordinals_in_quarterspec480)
+                    self._state.following.append(self.FOLLOW_quarter_ordinals_in_quarterspec509)
                     self.quarter_ordinals()
 
                     self._state.following.pop()
-                    self.match(self.input, MONTH, self.FOLLOW_MONTH_in_quarterspec482)
-                    self.match(self.input, OF, self.FOLLOW_OF_in_quarterspec484)
-                    self.match(self.input, QUARTER, self.FOLLOW_QUARTER_in_quarterspec486)
+                    self.match(self.input, MONTH, self.FOLLOW_MONTH_in_quarterspec511)
+                    self.match(self.input, OF, self.FOLLOW_OF_in_quarterspec513)
+                    self.match(self.input, QUARTER, self.FOLLOW_QUARTER_in_quarterspec515)
 
 
 
@@ -767,22 +827,22 @@
             try:
                 pass
                 pass
-                self._state.following.append(self.FOLLOW_month_of_quarter_ordinal_in_quarter_ordinals505)
+                self._state.following.append(self.FOLLOW_month_of_quarter_ordinal_in_quarter_ordinals534)
                 self.month_of_quarter_ordinal()
 
                 self._state.following.pop()
                 while True:
-                    alt9 = 2
-                    LA9_0 = self.input.LA(1)
+                    alt11 = 2
+                    LA11_0 = self.input.LA(1)
 
-                    if (LA9_0 == COMMA) :
-                        alt9 = 1
+                    if (LA11_0 == COMMA) :
+                        alt11 = 1
 
 
-                    if alt9 == 1:
+                    if alt11 == 1:
                         pass
-                        self.match(self.input, COMMA, self.FOLLOW_COMMA_in_quarter_ordinals508)
-                        self._state.following.append(self.FOLLOW_month_of_quarter_ordinal_in_quarter_ordinals510)
+                        self.match(self.input, COMMA, self.FOLLOW_COMMA_in_quarter_ordinals537)
+                        self._state.following.append(self.FOLLOW_month_of_quarter_ordinal_in_quarter_ordinals539)
                         self.month_of_quarter_ordinal()
 
                         self._state.following.pop()
@@ -850,43 +910,88 @@
 
 
 
+    DFA3_eot = DFA.unpack(
+        u"\13\uffff"
+        )
+
+    DFA3_eof = DFA.unpack(
+        u"\13\uffff"
+        )
+
+    DFA3_min = DFA.unpack(
+        u"\1\6\1\22\1\11\2\4\1\12\2\uffff\1\23\1\11\1\4"
+        )
+
+    DFA3_max = DFA.unpack(
+        u"\1\17\2\31\1\5\1\11\1\17\2\uffff\2\31\1\11"
+        )
+
+    DFA3_accept = DFA.unpack(
+        u"\6\uffff\1\1\1\2\3\uffff"
+        )
+
+    DFA3_special = DFA.unpack(
+        u"\13\uffff"
+        )
+
+
+    DFA3_transition = [
+        DFA.unpack(u"\1\1\3\uffff\6\2"),
+        DFA.unpack(u"\1\3\7\4"),
+        DFA.unpack(u"\1\5\10\uffff\1\3\7\4"),
+        DFA.unpack(u"\1\6\1\7"),
+        DFA.unpack(u"\1\6\1\7\3\uffff\1\10"),
+        DFA.unpack(u"\6\11"),
+        DFA.unpack(u""),
+        DFA.unpack(u""),
+        DFA.unpack(u"\7\12"),
+        DFA.unpack(u"\1\5\10\uffff\1\3\7\4"),
+        DFA.unpack(u"\1\6\1\7\3\uffff\1\10")
+    ]
+
+
+    DFA3 = DFA
+
 
     FOLLOW_specifictime_in_timespec44 = frozenset([1])
     FOLLOW_interval_in_timespec48 = frozenset([1])
-    FOLLOW_ordinals_in_specifictime69 = frozenset([18, 19, 20, 21, 22, 23, 24])
+    FOLLOW_ordinals_in_specifictime69 = frozenset([18, 19, 20, 21, 22, 23, 24, 25])
     FOLLOW_weekdays_in_specifictime71 = frozenset([4])
-    FOLLOW_OF_in_specifictime75 = frozenset([10, 11, 12, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38])
-    FOLLOW_monthspec_in_specifictime78 = frozenset([5])
-    FOLLOW_quarterspec_in_specifictime80 = frozenset([5])
-    FOLLOW_TIME_in_specifictime93 = frozenset([1])
-    FOLLOW_EVERY_in_interval112 = frozenset([7, 8])
-    FOLLOW_set_in_interval122 = frozenset([16, 17])
-    FOLLOW_period_in_interval138 = frozenset([1])
-    FOLLOW_EVERY_in_ordinals157 = frozenset([1])
-    FOLLOW_ordinal_in_ordinals173 = frozenset([1, 9])
-    FOLLOW_COMMA_in_ordinals176 = frozenset([10, 11, 12, 13, 14, 15])
-    FOLLOW_ordinal_in_ordinals178 = frozenset([1, 9])
-    FOLLOW_set_in_ordinal199 = frozenset([1])
-    FOLLOW_set_in_period238 = frozenset([1])
-    FOLLOW_weekday_in_weekdays261 = frozenset([1, 9])
-    FOLLOW_COMMA_in_weekdays264 = frozenset([18, 19, 20, 21, 22, 23, 24])
-    FOLLOW_weekday_in_weekdays266 = frozenset([1, 9])
-    FOLLOW_set_in_weekday285 = frozenset([1])
-    FOLLOW_MONTH_in_monthspec344 = frozenset([1])
-    FOLLOW_months_in_monthspec354 = frozenset([1])
-    FOLLOW_month_in_months371 = frozenset([1, 9])
-    FOLLOW_COMMA_in_months374 = frozenset([25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37])
-    FOLLOW_month_in_months376 = frozenset([1, 9])
-    FOLLOW_set_in_month395 = frozenset([1])
-    FOLLOW_QUARTER_in_quarterspec468 = frozenset([1])
-    FOLLOW_quarter_ordinals_in_quarterspec480 = frozenset([25])
-    FOLLOW_MONTH_in_quarterspec482 = frozenset([4])
-    FOLLOW_OF_in_quarterspec484 = frozenset([38])
-    FOLLOW_QUARTER_in_quarterspec486 = frozenset([1])
-    FOLLOW_month_of_quarter_ordinal_in_quarter_ordinals505 = frozenset([1, 9])
-    FOLLOW_COMMA_in_quarter_ordinals508 = frozenset([10, 11, 12, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38])
-    FOLLOW_month_of_quarter_ordinal_in_quarter_ordinals510 = frozenset([1, 9])
-    FOLLOW_set_in_month_of_quarter_ordinal529 = frozenset([1])
+    FOLLOW_OF_in_specifictime74 = frozenset([10, 11, 12, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39])
+    FOLLOW_monthspec_in_specifictime77 = frozenset([5])
+    FOLLOW_quarterspec_in_specifictime79 = frozenset([5])
+    FOLLOW_ordinals_in_specifictime96 = frozenset([18, 19, 20, 21, 22, 23, 24, 25])
+    FOLLOW_weekdays_in_specifictime98 = frozenset([5])
+    FOLLOW_TIME_in_specifictime112 = frozenset([1])
+    FOLLOW_EVERY_in_interval131 = frozenset([7, 8])
+    FOLLOW_set_in_interval141 = frozenset([16, 17])
+    FOLLOW_period_in_interval157 = frozenset([1])
+    FOLLOW_EVERY_in_ordinals176 = frozenset([1])
+    FOLLOW_ordinal_in_ordinals192 = frozenset([1, 9])
+    FOLLOW_COMMA_in_ordinals195 = frozenset([10, 11, 12, 13, 14, 15])
+    FOLLOW_ordinal_in_ordinals197 = frozenset([1, 9])
+    FOLLOW_set_in_ordinal218 = frozenset([1])
+    FOLLOW_set_in_period257 = frozenset([1])
+    FOLLOW_DAY_in_weekdays280 = frozenset([1])
+    FOLLOW_weekday_in_weekdays288 = frozenset([1, 9])
+    FOLLOW_COMMA_in_weekdays291 = frozenset([18, 19, 20, 21, 22, 23, 24, 25])
+    FOLLOW_weekday_in_weekdays293 = frozenset([1, 9])
+    FOLLOW_set_in_weekday314 = frozenset([1])
+    FOLLOW_MONTH_in_monthspec373 = frozenset([1])
+    FOLLOW_months_in_monthspec383 = frozenset([1])
+    FOLLOW_month_in_months400 = frozenset([1, 9])
+    FOLLOW_COMMA_in_months403 = frozenset([26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38])
+    FOLLOW_month_in_months405 = frozenset([1, 9])
+    FOLLOW_set_in_month424 = frozenset([1])
+    FOLLOW_QUARTER_in_quarterspec497 = frozenset([1])
+    FOLLOW_quarter_ordinals_in_quarterspec509 = frozenset([26])
+    FOLLOW_MONTH_in_quarterspec511 = frozenset([4])
+    FOLLOW_OF_in_quarterspec513 = frozenset([39])
+    FOLLOW_QUARTER_in_quarterspec515 = frozenset([1])
+    FOLLOW_month_of_quarter_ordinal_in_quarter_ordinals534 = frozenset([1, 9])
+    FOLLOW_COMMA_in_quarter_ordinals537 = frozenset([10, 11, 12, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39])
+    FOLLOW_month_of_quarter_ordinal_in_quarter_ordinals539 = frozenset([1, 9])
+    FOLLOW_set_in_month_of_quarter_ordinal558 = frozenset([1])
 
 
 
--- a/thirdparty/google_appengine/google/appengine/datastore/datastore_pb.py	Thu May 14 22:40:39 2009 +0200
+++ b/thirdparty/google_appengine/google/appengine/datastore/datastore_pb.py	Mon May 18 14:22:45 2009 +0200
@@ -380,6 +380,8 @@
   limit_ = 0
   has_require_perfect_plan_ = 0
   require_perfect_plan_ = 0
+  has_keys_only_ = 0
+  keys_only_ = 0
 
   def __init__(self, contents=None):
     self.filter_ = []
@@ -545,6 +547,19 @@
 
   def has_require_perfect_plan(self): return self.has_require_perfect_plan_
 
+  def keys_only(self): return self.keys_only_
+
+  def set_keys_only(self, x):
+    self.has_keys_only_ = 1
+    self.keys_only_ = x
+
+  def clear_keys_only(self):
+    if self.has_keys_only_:
+      self.has_keys_only_ = 0
+      self.keys_only_ = 0
+
+  def has_keys_only(self): return self.has_keys_only_
+
 
   def MergeFrom(self, x):
     assert x is not self
@@ -559,6 +574,7 @@
     if (x.has_limit()): self.set_limit(x.limit())
     for i in xrange(x.composite_index_size()): self.add_composite_index().CopyFrom(x.composite_index(i))
     if (x.has_require_perfect_plan()): self.set_require_perfect_plan(x.require_perfect_plan())
+    if (x.has_keys_only()): self.set_keys_only(x.keys_only())
 
   def Equals(self, x):
     if x is self: return 1
@@ -587,6 +603,8 @@
       if e1 != e2: return 0
     if self.has_require_perfect_plan_ != x.has_require_perfect_plan_: return 0
     if self.has_require_perfect_plan_ and self.require_perfect_plan_ != x.require_perfect_plan_: return 0
+    if self.has_keys_only_ != x.has_keys_only_: return 0
+    if self.has_keys_only_ and self.keys_only_ != x.keys_only_: return 0
     return 1
 
   def IsInitialized(self, debug_strs=None):
@@ -620,6 +638,7 @@
     n += 2 * len(self.composite_index_)
     for i in xrange(len(self.composite_index_)): n += self.lengthString(self.composite_index_[i].ByteSize())
     if (self.has_require_perfect_plan_): n += 3
+    if (self.has_keys_only_): n += 3
     return n + 1
 
   def Clear(self):
@@ -634,6 +653,7 @@
     self.clear_limit()
     self.clear_composite_index()
     self.clear_require_perfect_plan()
+    self.clear_keys_only()
 
   def OutputUnchecked(self, out):
     out.putVarInt32(10)
@@ -672,6 +692,9 @@
     if (self.has_require_perfect_plan_):
       out.putVarInt32(160)
       out.putBoolean(self.require_perfect_plan_)
+    if (self.has_keys_only_):
+      out.putVarInt32(168)
+      out.putBoolean(self.keys_only_)
 
   def TryMerge(self, d):
     while d.avail() > 0:
@@ -715,6 +738,9 @@
       if tt == 160:
         self.set_require_perfect_plan(d.getBoolean())
         continue
+      if tt == 168:
+        self.set_keys_only(d.getBoolean())
+        continue
       if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
       d.skipData(tt)
 
@@ -756,6 +782,7 @@
       res+=prefix+">\n"
       cnt+=1
     if self.has_require_perfect_plan_: res+=prefix+("require_perfect_plan: %s\n" % self.DebugFormatBool(self.require_perfect_plan_))
+    if self.has_keys_only_: res+=prefix+("keys_only: %s\n" % self.DebugFormatBool(self.keys_only_))
     return res
 
   kapp = 1
@@ -773,6 +800,7 @@
   klimit = 16
   kcomposite_index = 19
   krequire_perfect_plan = 20
+  kkeys_only = 21
 
   _TEXT = (
    "ErrorCode",
@@ -796,6 +824,7 @@
    "hint",
    "composite_index",
    "require_perfect_plan",
+   "keys_only",
   )
 
   _TYPES = (
@@ -840,6 +869,8 @@
 
    ProtocolBuffer.Encoder.NUMERIC,
 
+   ProtocolBuffer.Encoder.NUMERIC,
+
   )
 
   _STYLE = """"""
@@ -2416,6 +2447,8 @@
   cursor_ = None
   has_more_results_ = 0
   more_results_ = 0
+  has_keys_only_ = 0
+  keys_only_ = 0
 
   def __init__(self, contents=None):
     self.result_ = []
@@ -2469,12 +2502,26 @@
 
   def has_more_results(self): return self.has_more_results_
 
+  def keys_only(self): return self.keys_only_
+
+  def set_keys_only(self, x):
+    self.has_keys_only_ = 1
+    self.keys_only_ = x
+
+  def clear_keys_only(self):
+    if self.has_keys_only_:
+      self.has_keys_only_ = 0
+      self.keys_only_ = 0
+
+  def has_keys_only(self): return self.has_keys_only_
+
 
   def MergeFrom(self, x):
     assert x is not self
     if (x.has_cursor()): self.mutable_cursor().MergeFrom(x.cursor())
     for i in xrange(x.result_size()): self.add_result().CopyFrom(x.result(i))
     if (x.has_more_results()): self.set_more_results(x.more_results())
+    if (x.has_keys_only()): self.set_keys_only(x.keys_only())
 
   def Equals(self, x):
     if x is self: return 1
@@ -2485,6 +2532,8 @@
       if e1 != e2: return 0
     if self.has_more_results_ != x.has_more_results_: return 0
     if self.has_more_results_ and self.more_results_ != x.more_results_: return 0
+    if self.has_keys_only_ != x.has_keys_only_: return 0
+    if self.has_keys_only_ and self.keys_only_ != x.keys_only_: return 0
     return 1
 
   def IsInitialized(self, debug_strs=None):
@@ -2503,12 +2552,14 @@
     if (self.has_cursor_): n += 1 + self.lengthString(self.cursor_.ByteSize())
     n += 1 * len(self.result_)
     for i in xrange(len(self.result_)): n += self.lengthString(self.result_[i].ByteSize())
+    if (self.has_keys_only_): n += 2
     return n + 2
 
   def Clear(self):
     self.clear_cursor()
     self.clear_result()
     self.clear_more_results()
+    self.clear_keys_only()
 
   def OutputUnchecked(self, out):
     if (self.has_cursor_):
@@ -2521,6 +2572,9 @@
       self.result_[i].OutputUnchecked(out)
     out.putVarInt32(24)
     out.putBoolean(self.more_results_)
+    if (self.has_keys_only_):
+      out.putVarInt32(32)
+      out.putBoolean(self.keys_only_)
 
   def TryMerge(self, d):
     while d.avail() > 0:
@@ -2540,6 +2594,9 @@
       if tt == 24:
         self.set_more_results(d.getBoolean())
         continue
+      if tt == 32:
+        self.set_keys_only(d.getBoolean())
+        continue
       if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
       d.skipData(tt)
 
@@ -2559,17 +2616,20 @@
       res+=prefix+">\n"
       cnt+=1
     if self.has_more_results_: res+=prefix+("more_results: %s\n" % self.DebugFormatBool(self.more_results_))
+    if self.has_keys_only_: res+=prefix+("keys_only: %s\n" % self.DebugFormatBool(self.keys_only_))
     return res
 
   kcursor = 1
   kresult = 2
   kmore_results = 3
+  kkeys_only = 4
 
   _TEXT = (
    "ErrorCode",
    "cursor",
    "result",
    "more_results",
+   "keys_only",
   )
 
   _TYPES = (
@@ -2580,6 +2640,8 @@
 
    ProtocolBuffer.Encoder.NUMERIC,
 
+   ProtocolBuffer.Encoder.NUMERIC,
+
   )
 
   _STYLE = """"""
--- a/thirdparty/google_appengine/google/appengine/datastore/entity_pb.py	Thu May 14 22:40:39 2009 +0200
+++ b/thirdparty/google_appengine/google/appengine/datastore/entity_pb.py	Mon May 18 14:22:45 2009 +0200
@@ -1019,6 +1019,10 @@
       if debug_strs is not None:
         debug_strs.append('Required field: value not set.')
     elif not self.value_.IsInitialized(debug_strs): initialized = 0
+    if (not self.has_multiple_):
+      initialized = 0
+      if debug_strs is not None:
+        debug_strs.append('Required field: multiple not set.')
     return initialized
 
   def ByteSize(self):
@@ -1027,8 +1031,7 @@
     if (self.has_meaning_uri_): n += 1 + self.lengthString(len(self.meaning_uri_))
     n += self.lengthString(len(self.name_))
     n += self.lengthString(self.value_.ByteSize())
-    if (self.has_multiple_): n += 2
-    return n + 2
+    return n + 4
 
   def Clear(self):
     self.clear_meaning()
@@ -1046,9 +1049,8 @@
       out.putPrefixedString(self.meaning_uri_)
     out.putVarInt32(26)
     out.putPrefixedString(self.name_)
-    if (self.has_multiple_):
-      out.putVarInt32(32)
-      out.putBoolean(self.multiple_)
+    out.putVarInt32(32)
+    out.putBoolean(self.multiple_)
     out.putVarInt32(42)
     out.putVarInt32(self.value_.ByteSize())
     self.value_.OutputUnchecked(out)
--- a/thirdparty/google_appengine/google/appengine/dist/socket.py	Thu May 14 22:40:39 2009 +0200
+++ b/thirdparty/google_appengine/google/appengine/dist/socket.py	Mon May 18 14:22:45 2009 +0200
@@ -41,3 +41,5 @@
   if not hasattr(fp, 'fileno'):
     fp.fileno = lambda: None
   return fp
+
+ssl = None
--- a/thirdparty/google_appengine/google/appengine/ext/admin/__init__.py	Thu May 14 22:40:39 2009 +0200
+++ b/thirdparty/google_appengine/google/appengine/ext/admin/__init__.py	Mon May 18 14:22:45 2009 +0200
@@ -1101,6 +1101,9 @@
   def parse(self, value):
     return None
 
+  def python_type(self):
+    return None
+
   def format(self, value):
     return 'None'
 
--- a/thirdparty/google_appengine/google/appengine/ext/db/__init__.py	Thu May 14 22:40:39 2009 +0200
+++ b/thirdparty/google_appengine/google/appengine/ext/db/__init__.py	Mon May 18 14:22:45 2009 +0200
@@ -269,6 +269,9 @@
       model_class._properties[attr_name] = attr
       attr.__property_config__(model_class, attr_name)
 
+  model_class._unindexed_properties = frozenset(
+    name for name, prop in model_class._properties.items() if not prop.indexed)
+
 
 class PropertiedClass(type):
   """Meta-class for initializing Model classes properties.
@@ -336,8 +339,14 @@
 
   creation_counter = 0
 
-  def __init__(self, verbose_name=None, name=None, default=None,
-               required=False, validator=None, choices=None):
+  def __init__(self,
+               verbose_name=None,
+               name=None,
+               default=None,
+               required=False,
+               validator=None,
+               choices=None,
+               indexed=True):
     """Initializes this Property with the given options.
 
     Args:
@@ -348,6 +357,7 @@
       required: Whether property is required.
       validator: User provided method used for validation.
       choices: User provided set of valid property values.
+      indexed: Whether property is indexed.
     """
     self.verbose_name = verbose_name
     self.name = name
@@ -355,6 +365,7 @@
     self.required = required
     self.validator = validator
     self.choices = choices
+    self.indexed = indexed
     self.creation_counter = Property.creation_counter
     Property.creation_counter += 1
 
@@ -489,6 +500,21 @@
     """
     return value
 
+  def _require_parameter(self, kwds, parameter, value):
+    """Sets kwds[parameter] to value.
+
+    If kwds[parameter] exists and is not value, raises ConfigurationError.
+
+    Args:
+      kwds: The parameter dict, which maps parameter names (strings) to values.
+      parameter: The name of the parameter to set.
+      value: The value to set it to.
+    """
+    if parameter in kwds and kwds[parameter] != value:
+      raise ConfigurationError('%s must be %s.' % (parameter, value))
+
+    kwds[parameter] = value
+
   def _attr_name(self):
     """Attribute name we use for this property in model instances.
 
@@ -685,20 +711,15 @@
     if self.is_saved():
       entity = self._entity
     else:
+      kwds = {'_app': self._app,
+              'name': self._key_name,
+              'unindexed_properties': self._unindexed_properties}
+
       if self._parent_key is not None:
-        entity = _entity_class(self.kind(),
-                               parent=self._parent_key,
-                               name=self._key_name,
-                               _app=self._app)
+        kwds['parent'] = self._parent_key
       elif self._parent is not None:
-        entity = _entity_class(self.kind(),
-                               parent=self._parent._entity,
-                               name=self._key_name,
-                               _app=self._app)
-      else:
-        entity = _entity_class(self.kind(),
-                               name=self._key_name,
-                               _app=self._app)
+        kwds['parent'] = self._parent._entity
+      entity = _entity_class(self.kind(), **kwds)
 
     self._to_entity(entity)
     return entity
@@ -932,13 +953,13 @@
     return run_in_transaction(txn)
 
   @classmethod
-  def all(cls):
+  def all(cls, **kwds):
     """Returns a query over all instances of this model from the datastore.
 
     Returns:
       Query that will retrieve all instances from entity collection.
     """
-    return Query(cls)
+    return Query(cls, **kwds)
 
   @classmethod
   def gql(cls, query_string, *args, **kwds):
@@ -1300,13 +1321,23 @@
 class _BaseQuery(object):
   """Base class for both Query and GqlQuery."""
 
-  def __init__(self, model_class):
-    """Constructor."
-
-      Args:
-        model_class: Model class from which entities are constructed.
+  def __init__(self, model_class, keys_only=False):
+    """Constructor.
+
+    Args:
+      model_class: Model class from which entities are constructed.
+      keys_only: Whether the query should return full entities or only keys.
     """
     self._model_class = model_class
+    self._keys_only = keys_only
+
+  def is_keys_only(self):
+    """Returns whether this query is keys only.
+
+    Returns:
+      True if this query returns keys, False if it returns entities.
+    """
+    return self._keys_only
 
   def _get_query(self):
     """Subclass must override (and not call their super method).
@@ -1325,7 +1356,11 @@
     Returns:
       Iterator for this query.
     """
-    return _QueryIterator(self._model_class, iter(self._get_query().Run()))
+    iterator = self._get_query().Run()
+    if self._keys_only:
+      return iterator
+    else:
+      return _QueryIterator(self._model_class, iter(iterator))
 
   def __iter__(self):
     """Iterator for this query.
@@ -1388,7 +1423,11 @@
     if limit == 0:
       return []
     raw = self._get_query().Get(limit, offset)
-    return map(self._model_class.from_entity, raw)
+
+    if self._keys_only:
+      return raw
+    else:
+      return [self._model_class.from_entity(e) for e in raw]
 
   def __getitem__(self, arg):
     """Support for query[index] and query[start:stop].
@@ -1529,13 +1568,14 @@
        print story.title
   """
 
-  def __init__(self, model_class):
+  def __init__(self, model_class, keys_only=False):
     """Constructs a query over instances of the given Model.
 
     Args:
       model_class: Model class to build query for.
+      keys_only: Whether the query should return full entities or only keys.
     """
-    super(Query, self).__init__(model_class)
+    super(Query, self).__init__(model_class, keys_only)
     self.__query_sets = [{}]
     self.__orderings = []
     self.__ancestor = None
@@ -1545,7 +1585,10 @@
                  _multi_query_class=datastore.MultiQuery):
     queries = []
     for query_set in self.__query_sets:
-      query = _query_class(self._model_class.kind(), query_set)
+      query = _query_class(self._model_class.kind(),
+                           query_set,
+                           keys_only=self._keys_only)
+      query.Order(*self.__orderings)
       if self.__ancestor is not None:
         query.Ancestor(self.__ancestor)
       queries.append(query)
@@ -1566,7 +1609,6 @@
                              ' _multi_query_class is overridden.')
 
     if len(queries) == 1:
-      queries[0].Order(*self.__orderings)
       return queries[0]
     else:
       return _multi_query_class(queries, self.__orderings)
@@ -1611,6 +1653,9 @@
 
     Returns:
       Self to support method chaining.
+
+    Raises:
+      PropertyError if invalid property is provided.
     """
     match = _FILTER_REGEX.match(property_operator)
     prop = match.group(1)
@@ -1619,8 +1664,13 @@
     else:
       operator = '=='
 
+    if prop in self._model_class._unindexed_properties:
+      raise PropertyError('Property \'%s\' is not indexed' % prop)
+
     if operator.lower() == 'in':
-      if not isinstance(value, (list, tuple)):
+      if self._keys_only:
+        raise BadQueryError('Keys only queries do not support IN filters.')
+      elif not isinstance(value, (list, tuple)):
         raise BadValueError('Argument to the "in" operator must be a list')
       values = [_normalize_query_parameter(v) for v in value]
       self.__filter_disjunction(prop + ' =', values)
@@ -1628,6 +1678,8 @@
       if isinstance(value, (list, tuple)):
         raise BadValueError('Filtering on lists is not supported')
       if operator == '!=':
+        if self._keys_only:
+          raise BadQueryError('Keys only queries do not support != filters.')
         self.__filter_disjunction([prop + ' <', prop + ' >'],
                                   _normalize_query_parameter(value))
       else:
@@ -1650,7 +1702,7 @@
       Self to support method chaining.
 
     Raises:
-      PropertyError if invalid property name is provided.
+      PropertyError if invalid property is provided.
     """
     if property.startswith('-'):
       property = property[1:]
@@ -1663,6 +1715,9 @@
           property not in datastore_types._SPECIAL_PROPERTIES):
         raise PropertyError('Invalid property name \'%s\'' % property)
 
+    if property in self._model_class._unindexed_properties:
+      raise PropertyError('Property \'%s\' is not indexed' % property)
+
     self.__orderings.append((property, order))
     return self
 
@@ -1709,11 +1764,24 @@
       query_string: Properly formatted GQL query string.
       *args: Positional arguments used to bind numeric references in the query.
       **kwds: Dictionary-based arguments for named references.
+
+    Raises:
+      PropertyError if the query filters or sorts on a property that's not
+      indexed.
     """
     from google.appengine.ext import gql
     app = kwds.pop('_app', None)
+
     self._proto_query = gql.GQL(query_string, _app=app)
-    super(GqlQuery, self).__init__(class_for_kind(self._proto_query._entity))
+    model_class = class_for_kind(self._proto_query._entity)
+    super(GqlQuery, self).__init__(model_class,
+                                   keys_only=self._proto_query._keys_only)
+
+    for property, unused in (self._proto_query.filters().keys() +
+                             self._proto_query.orderings()):
+      if property in model_class._unindexed_properties:
+        raise PropertyError('Property \'%s\' is not indexed' % property)
+
     self.bind(*args, **kwds)
 
   def bind(self, *args, **kwds):
@@ -1740,39 +1808,56 @@
   def run(self):
     """Override _BaseQuery.run() so the LIMIT clause is handled properly."""
     query_run = self._proto_query.Run(*self._args, **self._kwds)
-    return _QueryIterator(self._model_class, iter(query_run))
+    if self._keys_only:
+      return query_run
+    else:
+      return _QueryIterator(self._model_class, iter(query_run))
 
   def _get_query(self):
     return self._proto_query.Bind(self._args, self._kwds)
 
 
-class TextProperty(Property):
-  """A string that can be longer than 500 bytes.
-
-  This type should be used for large text values to make sure the datastore
-  has good performance for queries.
+class UnindexedProperty(Property):
+  """A property that isn't indexed by either built-in or composite indices.
+
+  TextProperty and BlobProperty derive from this class.
   """
+  def __init__(self, *args, **kwds):
+    """Construct property. See the Property class for details.
+
+    Raises:
+      ConfigurationError if indexed=True.
+    """
+    self._require_parameter(kwds, 'indexed', False)
+    kwds['indexed'] = True
+    super(UnindexedProperty, self).__init__(*args, **kwds)
 
   def validate(self, value):
-    """Validate text property.
+    """Validate property.
 
     Returns:
       A valid value.
 
     Raises:
-      BadValueError if property is not instance of 'Text'.
+      BadValueError if property is not an instance of data_type.
     """
-    if value is not None and not isinstance(value, Text):
+    if value is not None and not isinstance(value, self.data_type):
       try:
-        value = Text(value)
+        value = self.data_type(value)
       except TypeError, err:
         raise BadValueError('Property %s must be convertible '
-                            'to a Text instance (%s)' % (self.name, err))
-    value = super(TextProperty, self).validate(value)
-    if value is not None and not isinstance(value, Text):
-      raise BadValueError('Property %s must be a Text instance' % self.name)
+                            'to a %s instance (%s)' %
+                            (self.name, self.data_type.__name__, err))
+    value = super(UnindexedProperty, self).validate(value)
+    if value is not None and not isinstance(value, self.data_type):
+      raise BadValueError('Property %s must be a %s instance' %
+                          (self.name, self.data_type.__name__))
     return value
 
+
+class TextProperty(UnindexedProperty):
+  """A string that can be longer than 500 bytes."""
+
   data_type = Text
 
 
@@ -1886,32 +1971,8 @@
   data_type = PostalAddress
 
 
-class BlobProperty(Property):
-  """A string that can be longer than 500 bytes.
-
-  This type should be used for large binary values to make sure the datastore
-  has good performance for queries.
-  """
-
-  def validate(self, value):
-    """Validate blob property.
-
-    Returns:
-      A valid value.
-
-    Raises:
-      BadValueError if property is not instance of 'Blob'.
-    """
-    if value is not None and not isinstance(value, Blob):
-      try:
-        value = Blob(value)
-      except TypeError, err:
-        raise BadValueError('Property %s must be convertible '
-                            'to a Blob instance (%s)' % (self.name, err))
-    value = super(BlobProperty, self).validate(value)
-    if value is not None and not isinstance(value, Blob):
-      raise BadValueError('Property %s must be a Blob instance' % self.name)
-    return value
+class BlobProperty(UnindexedProperty):
+  """A byte string that can be longer than 500 bytes."""
 
   data_type = Blob
 
@@ -2266,9 +2327,15 @@
 class UserProperty(Property):
   """A user property."""
 
-  def __init__(self, verbose_name=None, name=None,
-               required=False, validator=None, choices=None,
-               auto_current_user=False, auto_current_user_add=False):
+  def __init__(self,
+               verbose_name=None,
+               name=None,
+               required=False,
+               validator=None,
+               choices=None,
+               auto_current_user=False,
+               auto_current_user_add=False,
+               indexed=True):
     """Initializes this Property with the given options.
 
     Note: this does *not* support the 'default' keyword argument.
@@ -2285,11 +2352,13 @@
         each time the entity is written to the datastore.
       auto_current_user_add: If true, the value is set to the current user
         the first time the entity is written to the datastore.
+      indexed: Whether property is indexed.
     """
     super(UserProperty, self).__init__(verbose_name, name,
                                        required=required,
                                        validator=validator,
-                                       choices=choices)
+                                       choices=choices,
+                                       indexed=indexed)
     self.auto_current_user = auto_current_user
     self.auto_current_user_add = auto_current_user_add
 
@@ -2360,13 +2429,14 @@
       raise TypeError('Item type should be a type object')
     if item_type not in _ALLOWED_PROPERTY_TYPES:
       raise ValueError('Item type %s is not acceptable' % item_type.__name__)
-    if 'required' in kwds and kwds['required'] is not True:
-      raise ValueError('List values must be required')
+    if issubclass(item_type, (Blob, Text)):
+      self._require_parameter(kwds, 'indexed', False)
+      kwds['indexed'] = True
+    self._require_parameter(kwds, 'required', True)
     if default is None:
       default = []
     self.item_type = item_type
     super(ListProperty, self).__init__(verbose_name,
-                                       required=True,
                                        default=default,
                                        **kwds)
 
--- a/thirdparty/google_appengine/google/appengine/ext/gql/__init__.py	Thu May 14 22:40:39 2009 +0200
+++ b/thirdparty/google_appengine/google/appengine/ext/gql/__init__.py	Mon May 18 14:22:45 2009 +0200
@@ -77,7 +77,7 @@
 
   The syntax for SELECT is fairly straightforward:
 
-  SELECT * FROM <entity>
+  SELECT [* | __key__ ] FROM <entity>
     [WHERE <condition> [AND <condition> ...]]
     [ORDER BY <property> [ASC | DESC] [, <property> [ASC | DESC] ...]]
     [LIMIT [<offset>,]<count>]
@@ -144,9 +144,8 @@
       simple types (strings, integers, floats).
 
 
-  SELECT * will return an iterable set of entries, but other operations (schema
-  queries, updates, inserts or field selections) will return alternative
-  result types.
+  SELECT * will return an iterable set of entities; SELECT __key__ will return
+  an iterable set of Keys.
   """
 
   TOKENIZE_REGEX = re.compile(r"""
@@ -229,7 +228,8 @@
       query_count = 1
 
     for i in xrange(query_count):
-      queries.append(datastore.Query(self._entity, _app=self.__app))
+      queries.append(datastore.Query(self._entity, _app=self.__app,
+                                     keys_only=self._keys_only))
 
     logging.log(LOG_LEVEL,
                 'Binding with %i positional args %s and %i keywords %s'
@@ -552,6 +552,9 @@
     Raises:
       BadArgumentError if the filter is invalid (namely non-list with IN)
     """
+    if condition.lower() in ('!=', 'in') and self._keys_only:
+      raise datastore_errors.BadQueryError(
+        'Keys only queries do not support IN or != filters.')
 
     def CloneQueries(queries, n):
       """Do a full copy of the queries and append to the end of the queries.
@@ -675,6 +678,7 @@
 
   __iter__ = Run
 
+  __result_type_regex = re.compile(r'(\*|__key__)')
   __quoted_string_regex = re.compile(r'((?:\'[^\'\n\r]*\')+)')
   __ordinal_regex = re.compile(r':(\d+)$')
   __named_regex = re.compile(r':(\w+)$')
@@ -783,7 +787,8 @@
       True if parsing completed okay.
     """
     self.__Expect('SELECT')
-    self.__Expect('*')
+    result_type = self.__AcceptRegex(self.__result_type_regex)
+    self._keys_only = (result_type == '__key__')
     return self.__From()
 
   def __From(self):
--- a/thirdparty/google_appengine/google/appengine/ext/search/__init__.py	Thu May 14 22:40:39 2009 +0200
+++ b/thirdparty/google_appengine/google/appengine/ext/search/__init__.py	Mon May 18 14:22:45 2009 +0200
@@ -143,6 +143,7 @@
     self._word_delimiter_regex = word_delimiter_regex
     if isinstance(kind_or_entity, datastore.Entity):
       self._Entity__key = kind_or_entity._Entity__key
+      self._Entity__unindexed_properties = frozenset(kind_or_entity.unindexed_properties())
       self.update(kind_or_entity)
     else:
       super(SearchableEntity, self).__init__(kind_or_entity, *args, **kwargs)
--- a/thirdparty/google_appengine/google/appengine/tools/appcfg.py	Thu May 14 22:40:39 2009 +0200
+++ b/thirdparty/google_appengine/google/appengine/tools/appcfg.py	Mon May 18 14:22:45 2009 +0200
@@ -674,7 +674,7 @@
   """Provide facilities to export request logs."""
 
   def __init__(self, server, config, output_file,
-               num_days, append, severity, now):
+               num_days, append, severity, now, vhost):
     """Constructor.
 
     Args:
@@ -686,6 +686,7 @@
       append: True if appending to an existing file.
       severity: App log severity to request (0-4); None for no app logs.
       now: POSIX timestamp used for calculating valid dates for num_days.
+      vhost: The virtual host of log messages to get. None for all hosts.
     """
     self.server = server
     self.config = config
@@ -693,6 +694,7 @@
     self.append = append
     self.num_days = num_days
     self.severity = severity
+    self.vhost = vhost
     self.version_id = self.config.version + ".1"
     self.sentinel = None
     self.write_mode = "w"
@@ -770,6 +772,8 @@
       kwds["offset"] = offset
     if self.severity is not None:
       kwds["severity"] = str(self.severity)
+    if self.vhost is not None:
+      kwds["vhost"] = str(self.vhost)
     response = self.server.Send("/api/request_logs", payload=None, **kwds)
     response = response.replace("\r", "\0")
     lines = response.splitlines()
@@ -1789,7 +1793,8 @@
                                    self.options.num_days,
                                    self.options.append,
                                    self.options.severity,
-                                   time.time())
+                                   time.time(),
+                                   self.options.vhost)
     logs_requester.DownloadLogs()
 
   def _RequestLogsOptions(self, parser):
@@ -1813,6 +1818,10 @@
                       help="Severity of app-level log messages to get. "
                       "The range is 0 (DEBUG) through 4 (CRITICAL). "
                       "If omitted, only request logs are returned.")
+    parser.add_option("--vhost", type="string", dest="vhost",
+                      action="store", default=None,
+                      help="The virtual host of log messages to get. "
+                      "If omitted, all log messages are returned.")
 
   def CronInfo(self, now=None, output=sys.stdout):
     """Displays information about cron definitions.
@@ -1834,8 +1843,8 @@
         if not description:
           description = "<no description>"
         print >>output, "\n%s:\nURL: %s\nSchedule: %s" % (description,
-                                                          entry.schedule,
-                                                          entry.url)
+                                                          entry.url,
+                                                          entry.schedule)
         schedule = groctimespecification.GrocTimeSpecification(entry.schedule)
         matches = schedule.GetMatches(now, self.options.num_runs)
         for match in matches:
@@ -1853,8 +1862,8 @@
                       help="Number of runs of each cron job to display"
                       "Default is 5")
 
-  def _CheckRequiredUploadOptions(self):
-    """Checks that upload options are present."""
+  def _CheckRequiredLoadOptions(self):
+    """Checks that upload/download options are present."""
     for option in ["filename", "kind", "config_file"]:
       if getattr(self.options, option) is None:
         self.parser.error("Option '%s' is required." % option)
@@ -1863,7 +1872,7 @@
                         "assigned to an endpoint in app.yaml, or provide "
                         "the url of the handler via the 'url' option.")
 
-  def InferUploadUrl(self, appyaml):
+  def InferRemoteApiUrl(self, appyaml):
     """Uses app.yaml to determine the remote_api endpoint.
 
     Args:
@@ -1885,11 +1894,11 @@
             return "http://%s%s" % (server, handler.url)
     return None
 
-  def RunBulkloader(self, **kwargs):
+  def RunBulkloader(self, arg_dict):
     """Invokes the bulkloader with the given keyword arguments.
 
     Args:
-      kwargs: Keyword arguments to pass to bulkloader.Run().
+      arg_dict: Dictionary of arguments to pass to bulkloader.Run().
     """
     try:
       import sqlite3
@@ -1898,17 +1907,10 @@
                     "sqlite3 module (included in python since 2.5).")
       sys.exit(1)
 
-    sys.exit(bulkloader.Run(kwargs))
-
-  def PerformUpload(self, run_fn=None):
-    """Performs a datastore upload via the bulkloader.
+    sys.exit(bulkloader.Run(arg_dict))
 
-    Args:
-      run_fn: Function to invoke the bulkloader, used for testing.
-    """
-    if run_fn is None:
-      run_fn = self.RunBulkloader
-
+  def _SetupLoad(self):
+    """Performs common verification and set up for upload and download."""
     if len(self.args) != 1:
       self.parser.error("Expected <directory> argument.")
 
@@ -1918,11 +1920,11 @@
     self.options.app_id = appyaml.application
 
     if not self.options.url:
-      url = self.InferUploadUrl(appyaml)
+      url = self.InferRemoteApiUrl(appyaml)
       if url is not None:
         self.options.url = url
 
-    self._CheckRequiredUploadOptions()
+    self._CheckRequiredLoadOptions()
 
     if self.options.batch_size < 1:
       self.parser.error("batch_size must be 1 or larger.")
@@ -1934,34 +1936,68 @@
       logging.getLogger().setLevel(logging.DEBUG)
       self.options.debug = True
 
+  def _MakeLoaderArgs(self):
+    return dict([(arg_name, getattr(self.options, arg_name, None)) for
+                 arg_name in (
+        "app_id",
+        "url",
+        "filename",
+        "batch_size",
+        "kind",
+        "num_threads",
+        "bandwidth_limit",
+        "rps_limit",
+        "http_limit",
+        "db_filename",
+        "config_file",
+        "auth_domain",
+        "has_header",
+        "loader_opts",
+        "log_file",
+        "passin",
+        "email",
+        "debug",
+        "exporter_opts",
+        "result_db_filename",
+        )])
+
+  def PerformDownload(self, run_fn=None):
+    """Performs a datastore download via the bulkloader.
+
+    Args:
+      run_fn: Function to invoke the bulkloader, used for testing.
+    """
+    if run_fn is None:
+      run_fn = self.RunBulkloader
+    self._SetupLoad()
+
+    StatusUpdate("Downloading data records.")
+
+    args = self._MakeLoaderArgs()
+    args['download'] = True
+    args['has_header'] = False
+
+    run_fn(args)
+
+  def PerformUpload(self, run_fn=None):
+    """Performs a datastore upload via the bulkloader.
+
+    Args:
+      run_fn: Function to invoke the bulkloader, used for testing.
+    """
+    if run_fn is None:
+      run_fn = self.RunBulkloader
+    self._SetupLoad()
+
     StatusUpdate("Uploading data records.")
 
-    run_fn(app_id=self.options.app_id,
-           url=self.options.url,
-           filename=self.options.filename,
-           batch_size=self.options.batch_size,
-           kind=self.options.kind,
-           num_threads=self.options.num_threads,
-           bandwidth_limit=self.options.bandwidth_limit,
-           rps_limit=self.options.rps_limit,
-           http_limit=self.options.http_limit,
-           db_filename=self.options.db_filename,
-           config_file=self.options.config_file,
-           auth_domain=self.options.auth_domain,
-           has_header=self.options.has_header,
-           loader_opts=self.options.loader_opts,
-           log_file=self.options.log_file,
-           passin=self.options.passin,
-           email=self.options.email,
-           debug=self.options.debug,
+    args = self._MakeLoaderArgs()
+    args['download'] = False
 
-           exporter_opts=None,
-           download=False,
-           result_db_filename=None,
-           )
+    run_fn(args)
 
-  def _PerformUploadOptions(self, parser):
-    """Adds 'upload_data' specific options to the 'parser' passed in.
+  def _PerformLoadOptions(self, parser):
+    """Adds options common to 'upload_data' and 'download_data'.
 
     Args:
       parser: An instance of OptionsParser.
@@ -2000,16 +2036,39 @@
     parser.add_option("--auth_domain", type="string", dest="auth_domain",
                       action="store", default="gmail.com",
                       help="The name of the authorization domain to use.")
+    parser.add_option("--log_file", type="string", dest="log_file",
+                      help="File to write bulkloader logs.  If not supplied "
+                           "then a new log file will be created, named: "
+                           "bulkloader-log-TIMESTAMP.")
+
+  def _PerformUploadOptions(self, parser):
+    """Adds 'upload_data' specific options to the 'parser' passed in.
+
+    Args:
+      parser: An instance of OptionsParser.
+    """
+    self._PerformLoadOptions(parser)
     parser.add_option("--has_header", dest="has_header",
                       action="store_true", default=False,
                       help="Whether the first line of the input file should be"
                       " skipped")
     parser.add_option("--loader_opts", type="string", dest="loader_opts",
-                      help="A string to pass to the Loader.Initialize method.")
-    parser.add_option("--log_file", type="string", dest="log_file",
-                      help="File to write bulkloader logs.  If not supplied "
-                           "then a new log file will be created, named: "
-                           "bulkloader-log-TIMESTAMP.")
+                      help="A string to pass to the Loader.initialize method.")
+
+  def _PerformDownloadOptions(self, parser):
+    """Adds 'download_data' specific options to the 'parser' passed in.
+
+    Args:
+      parser: An instance of OptionsParser.
+    """
+    self._PerformLoadOptions(parser)
+    parser.add_option("--exporter_opts", type="string", dest="exporter_opts",
+                      help="A string to pass to the Exporter.initialize method."
+                      )
+    parser.add_option("--result_db_filename", type="string",
+                      dest="result_db_filename",
+                      action="store",
+                      help="Database to write entities to for download.")
 
   class Action(object):
     """Contains information about a command line action.
@@ -2121,11 +2180,20 @@
           function="PerformUpload",
           usage="%prog [options] upload_data <directory>",
           options=_PerformUploadOptions,
-          short_desc="Upload CSV records to datastore",
+          short_desc="Upload data records to datastore.",
           long_desc="""
-The 'upload_data' command translates CSV records into datastore entities and
+The 'upload_data' command translates input records into datastore entities and
 uploads them into your application's datastore."""),
 
+      "download_data": Action(
+          function="PerformDownload",
+          usage="%prog [options] download_data <directory>",
+          options=_PerformDownloadOptions,
+          short_desc="Download entities from datastore.",
+          long_desc="""
+The 'download_data' command downloads datastore entities and writes them to
+file as CSV or developer defined format."""),
+
 
 
   }
--- a/thirdparty/google_appengine/google/appengine/tools/bulkloader.py	Thu May 14 22:40:39 2009 +0200
+++ b/thirdparty/google_appengine/google/appengine/tools/bulkloader.py	Mon May 18 14:22:45 2009 +0200
@@ -1702,8 +1702,11 @@
       duration: The duration of the transfer in seconds.
     """
     if duration > self.threshhold2:
-      self.DecreaseWorkers()
+      logger.debug('Transfer took %s, decreasing workers.', duration)
+      self.DecreaseWorkers(backoff=False)
+      return
     elif duration > self.threshhold1:
+      logger.debug('Transfer took %s, not increasing workers.', duration)
       return
     elif self.enabled:
       if self.backoff_time > 0.0:
@@ -1722,13 +1725,17 @@
                      self.enabled_count)
         self.thread_semaphore.release()
 
-  def DecreaseWorkers(self):
+  def DecreaseWorkers(self, backoff=True):
     """Informs the thread_gate that an item failed to send.
 
     If thread throttling is enabled, this method will cause the
     throttler to allow one fewer thread in the critical section. If
     there is only one thread remaining, failures will result in
     exponential backoff until there is a success.
+
+    Args:
+      backoff: Whether to increase exponential backoff if there is only
+        one thread enabled.
     """
     if self.enabled:
       do_disable = False
@@ -1738,7 +1745,7 @@
           if self.enabled_count > 1:
             do_disable = True
             self.enabled_count -= 1
-          else:
+          elif backoff:
             if self.backoff_time == 0.0:
               self.backoff_time = INITIAL_BACKOFF
             else:
@@ -2138,8 +2145,8 @@
               status = 200
               transferred = True
               transfer_time = self.get_time() - t
-              logger.debug('[%s] %s Transferred %d entities', self.getName(),
-                           item, item.count)
+              logger.debug('[%s] %s Transferred %d entities in %0.1f seconds',
+                           self.getName(), item, item.count, transfer_time)
               self.throttle.AddTransfer(RECORDS, item.count)
             except (db.InternalError, db.NotSavedError, db.Timeout,
                     apiproxy_errors.OverQuotaError,
@@ -2169,8 +2176,8 @@
         finally:
           if transferred:
             item.MarkAsTransferred()
+            self.work_queue.task_done()
             self.thread_gate.TransferSuccess(transfer_time)
-            self.work_queue.task_done()
           else:
             item.MarkAsError()
             try:
@@ -2314,6 +2321,7 @@
     if export_result:
       item.Process(export_result, self.num_threads, self.batch_size,
                    self.work_queue)
+    item.state = STATE_GOT
 
 
 class DataSourceThread(_ThreadBase):
--- a/thirdparty/google_appengine/google/appengine/tools/dev_appserver.py	Thu May 14 22:40:39 2009 +0200
+++ b/thirdparty/google_appengine/google/appengine/tools/dev_appserver.py	Mon May 18 14:22:45 2009 +0200
@@ -1178,6 +1178,7 @@
     'timing',
     'unicodedata',
     'zlib',
+    '_ast',
     '_bisect',
     '_codecs',
     '_codecs_cn',
@@ -3230,6 +3231,7 @@
     'capability_service',
     capability_stub.CapabilityServiceStub())
 
+
   try:
     from google.appengine.api.images import images_stub
     apiproxy_stub_map.apiproxy.RegisterStub(
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/thirdparty/vcs-load-dirs/COPYING	Mon May 18 14:22:45 2009 +0200
@@ -0,0 +1,342 @@
+
+		    GNU GENERAL PUBLIC LICENSE
+		       Version 2, June 1991
+
+ Copyright (C) 1989, 1991 Free Software Foundation, Inc.
+     59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
+ Everyone is permitted to copy and distribute verbatim copies
+ of this license document, but changing it is not allowed.
+
+			    Preamble
+
+  The licenses for most software are designed to take away your
+freedom to share and change it.  By contrast, the GNU General Public
+License is intended to guarantee your freedom to share and change free
+software--to make sure the software is free for all its users.  This
+General Public License applies to most of the Free Software
+Foundation's software and to any other program whose authors commit to
+using it.  (Some other Free Software Foundation software is covered by
+the GNU Library General Public License instead.)  You can apply it to
+your programs, too.
+
+  When we speak of free software, we are referring to freedom, not
+price.  Our General Public Licenses are designed to make sure that you
+have the freedom to distribute copies of free software (and charge for
+this service if you wish), that you receive source code or can get it
+if you want it, that you can change the software or use pieces of it
+in new free programs; and that you know you can do these things.
+
+  To protect your rights, we need to make restrictions that forbid
+anyone to deny you these rights or to ask you to surrender the rights.
+These restrictions translate to certain responsibilities for you if you
+distribute copies of the software, or if you modify it.
+
+  For example, if you distribute copies of such a program, whether
+gratis or for a fee, you must give the recipients all the rights that
+you have.  You must make sure that they, too, receive or can get the
+source code.  And you must show them these terms so they know their
+rights.
+
+  We protect your rights with two steps: (1) copyright the software, and
+(2) offer you this license which gives you legal permission to copy,
+distribute and/or modify the software.
+
+  Also, for each author's protection and ours, we want to make certain
+that everyone understands that there is no warranty for this free
+software.  If the software is modified by someone else and passed on, we
+want its recipients to know that what they have is not the original, so
+that any problems introduced by others will not reflect on the original
+authors' reputations.
+
+  Finally, any free program is threatened constantly by software
+patents.  We wish to avoid the danger that redistributors of a free
+program will individually obtain patent licenses, in effect making the
+program proprietary.  To prevent this, we have made it clear that any
+patent must be licensed for everyone's free use or not licensed at all.
+
+  The precise terms and conditions for copying, distribution and
+modification follow.
+
+		    GNU GENERAL PUBLIC LICENSE
+   TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION
+
+  0. This License applies to any program or other work which contains
+a notice placed by the copyright holder saying it may be distributed
+under the terms of this General Public License.  The "Program", below,
+refers to any such program or work, and a "work based on the Program"
+means either the Program or any derivative work under copyright law:
+that is to say, a work containing the Program or a portion of it,
+either verbatim or with modifications and/or translated into another
+language.  (Hereinafter, translation is included without limitation in
+the term "modification".)  Each licensee is addressed as "you".
+
+Activities other than copying, distribution and modification are not
+covered by this License; they are outside its scope.  The act of
+running the Program is not restricted, and the output from the Program
+is covered only if its contents constitute a work based on the
+Program (independent of having been made by running the Program).
+Whether that is true depends on what the Program does.
+
+  1. You may copy and distribute verbatim copies of the Program's
+source code as you receive it, in any medium, provided that you
+conspicuously and appropriately publish on each copy an appropriate
+copyright notice and disclaimer of warranty; keep intact all the
+notices that refer to this License and to the absence of any warranty;
+and give any other recipients of the Program a copy of this License
+along with the Program.
+
+You may charge a fee for the physical act of transferring a copy, and
+you may at your option offer warranty protection in exchange for a fee.
+
+  2. You may modify your copy or copies of the Program or any portion
+of it, thus forming a work based on the Program, and copy and
+distribute such modifications or work under the terms of Section 1
+above, provided that you also meet all of these conditions:
+
+    a) You must cause the modified files to carry prominent notices
+    stating that you changed the files and the date of any change.
+
+    b) You must cause any work that you distribute or publish, that in
+    whole or in part contains or is derived from the Program or any
+    part thereof, to be licensed as a whole at no charge to all third
+    parties under the terms of this License.
+
+    c) If the modified program normally reads commands interactively
+    when run, you must cause it, when started running for such
+    interactive use in the most ordinary way, to print or display an
+    announcement including an appropriate copyright notice and a
+    notice that there is no warranty (or else, saying that you provide
+    a warranty) and that users may redistribute the program under
+    these conditions, and telling the user how to view a copy of this
+    License.  (Exception: if the Program itself is interactive but
+    does not normally print such an announcement, your work based on
+    the Program is not required to print an announcement.)
+
+These requirements apply to the modified work as a whole.  If
+identifiable sections of that work are not derived from the Program,
+and can be reasonably considered independent and separate works in
+themselves, then this License, and its terms, do not apply to those
+sections when you distribute them as separate works.  But when you
+distribute the same sections as part of a whole which is a work based
+on the Program, the distribution of the whole must be on the terms of
+this License, whose permissions for other licensees extend to the
+entire whole, and thus to each and every part regardless of who wrote it.
+
+Thus, it is not the intent of this section to claim rights or contest
+your rights to work written entirely by you; rather, the intent is to
+exercise the right to control the distribution of derivative or
+collective works based on the Program.
+
+In addition, mere aggregation of another work not based on the Program
+with the Program (or with a work based on the Program) on a volume of
+a storage or distribution medium does not bring the other work under
+the scope of this License.
+
+  3. You may copy and distribute the Program (or a work based on it,
+under Section 2) in object code or executable form under the terms of
+Sections 1 and 2 above provided that you also do one of the following:
+
+    a) Accompany it with the complete corresponding machine-readable
+    source code, which must be distributed under the terms of Sections
+    1 and 2 above on a medium customarily used for software interchange; or,
+
+    b) Accompany it with a written offer, valid for at least three
+    years, to give any third party, for a charge no more than your
+    cost of physically performing source distribution, a complete
+    machine-readable copy of the corresponding source code, to be
+    distributed under the terms of Sections 1 and 2 above on a medium
+    customarily used for software interchange; or,
+
+    c) Accompany it with the information you received as to the offer
+    to distribute corresponding source code.  (This alternative is
+    allowed only for noncommercial distribution and only if you
+    received the program in object code or executable form with such
+    an offer, in accord with Subsection b above.)
+
+The source code for a work means the preferred form of the work for
+making modifications to it.  For an executable work, complete source
+code means all the source code for all modules it contains, plus any
+associated interface definition files, plus the scripts used to
+control compilation and installation of the executable.  However, as a
+special exception, the source code distributed need not include
+anything that is normally distributed (in either source or binary
+form) with the major components (compiler, kernel, and so on) of the
+operating system on which the executable runs, unless that component
+itself accompanies the executable.
+
+If distribution of executable or object code is made by offering
+access to copy from a designated place, then offering equivalent
+access to copy the source code from the same place counts as
+distribution of the source code, even though third parties are not
+compelled to copy the source along with the object code.
+
+  4. You may not copy, modify, sublicense, or distribute the Program
+except as expressly provided under this License.  Any attempt
+otherwise to copy, modify, sublicense or distribute the Program is
+void, and will automatically terminate your rights under this License.
+However, parties who have received copies, or rights, from you under
+this License will not have their licenses terminated so long as such
+parties remain in full compliance.
+
+  5. You are not required to accept this License, since you have not
+signed it.  However, nothing else grants you permission to modify or
+distribute the Program or its derivative works.  These actions are
+prohibited by law if you do not accept this License.  Therefore, by
+modifying or distributing the Program (or any work based on the
+Program), you indicate your acceptance of this License to do so, and
+all its terms and conditions for copying, distributing or modifying
+the Program or works based on it.
+
+  6. Each time you redistribute the Program (or any work based on the
+Program), the recipient automatically receives a license from the
+original licensor to copy, distribute or modify the Program subject to
+these terms and conditions.  You may not impose any further
+restrictions on the recipients' exercise of the rights granted herein.
+You are not responsible for enforcing compliance by third parties to
+this License.
+
+  7. If, as a consequence of a court judgment or allegation of patent
+infringement or for any other reason (not limited to patent issues),
+conditions are imposed on you (whether by court order, agreement or
+otherwise) that contradict the conditions of this License, they do not
+excuse you from the conditions of this License.  If you cannot
+distribute so as to satisfy simultaneously your obligations under this
+License and any other pertinent obligations, then as a consequence you
+may not distribute the Program at all.  For example, if a patent
+license would not permit royalty-free redistribution of the Program by
+all those who receive copies directly or indirectly through you, then
+the only way you could satisfy both it and this License would be to
+refrain entirely from distribution of the Program.
+
+If any portion of this section is held invalid or unenforceable under
+any particular circumstance, the balance of the section is intended to
+apply and the section as a whole is intended to apply in other
+circumstances.
+
+It is not the purpose of this section to induce you to infringe any
+patents or other property right claims or to contest validity of any
+such claims; this section has the sole purpose of protecting the
+integrity of the free software distribution system, which is
+implemented by public license practices.  Many people have made
+generous contributions to the wide range of software distributed
+through that system in reliance on consistent application of that
+system; it is up to the author/donor to decide if he or she is willing
+to distribute software through any other system and a licensee cannot
+impose that choice.
+
+This section is intended to make thoroughly clear what is believed to
+be a consequence of the rest of this License.
+
+  8. If the distribution and/or use of the Program is restricted in
+certain countries either by patents or by copyrighted interfaces, the
+original copyright holder who places the Program under this License
+may add an explicit geographical distribution limitation excluding
+those countries, so that distribution is permitted only in or among
+countries not thus excluded.  In such case, this License incorporates
+the limitation as if written in the body of this License.
+
+  9. The Free Software Foundation may publish revised and/or new versions
+of the General Public License from time to time.  Such new versions will
+be similar in spirit to the present version, but may differ in detail to
+address new problems or concerns.
+
+Each version is given a distinguishing version number.  If the Program
+specifies a version number of this License which applies to it and "any
+later version", you have the option of following the terms and conditions
+either of that version or of any later version published by the Free
+Software Foundation.  If the Program does not specify a version number of
+this License, you may choose any version ever published by the Free Software
+Foundation.
+
+  10. If you wish to incorporate parts of the Program into other free
+programs whose distribution conditions are different, write to the author
+to ask for permission.  For software which is copyrighted by the Free
+Software Foundation, write to the Free Software Foundation; we sometimes
+make exceptions for this.  Our decision will be guided by the two goals
+of preserving the free status of all derivatives of our free software and
+of promoting the sharing and reuse of software generally.
+
+			    NO WARRANTY
+
+  11. BECAUSE THE PROGRAM IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY
+FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW.  EXCEPT WHEN
+OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES
+PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED
+OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.  THE ENTIRE RISK AS
+TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU.  SHOULD THE
+PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING,
+REPAIR OR CORRECTION.
+
+  12. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
+WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR
+REDISTRIBUTE THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES,
+INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING
+OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED
+TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY
+YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER
+PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE
+POSSIBILITY OF SUCH DAMAGES.
+
+		     END OF TERMS AND CONDITIONS
+
+	    How to Apply These Terms to Your New Programs
+
+  If you develop a new program, and you want it to be of the greatest
+possible use to the public, the best way to achieve this is to make it
+free software which everyone can redistribute and change under these terms.
+
+  To do so, attach the following notices to the program.  It is safest
+to attach them to the start of each source file to most effectively
+convey the exclusion of warranty; and each file should have at least
+the "copyright" line and a pointer to where the full notice is found.
+
+    <one line to give the program's name and a brief idea of what it does.>
+    Copyright (C) <year>  <name of author>
+
+    This program is free software; you can redistribute it and/or modify
+    it under the terms of the GNU General Public License as published by
+    the Free Software Foundation; either version 2 of the License, or
+    (at your option) any later version.
+
+    This program is distributed in the hope that it will be useful,
+    but WITHOUT ANY WARRANTY; without even the implied warranty of
+    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+    GNU General Public License for more details.
+
+    You should have received a copy of the GNU General Public License
+    along with this program; if not, write to the Free Software
+    Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
+
+
+Also add information on how to contact you by electronic and paper mail.
+
+If the program is interactive, make it output a short notice like this
+when it starts in an interactive mode:
+
+    Gnomovision version 69, Copyright (C) year  name of author
+    Gnomovision comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
+    This is free software, and you are welcome to redistribute it
+    under certain conditions; type `show c' for details.
+
+The hypothetical commands `show w' and `show c' should show the appropriate
+parts of the General Public License.  Of course, the commands you use may
+be called something other than `show w' and `show c'; they could even be
+mouse-clicks or menu items--whatever suits your program.
+
+You should also get your employer (if you work as a programmer) or your
+school, if any, to sign a "copyright disclaimer" for the program, if
+necessary.  Here is a sample; alter the names:
+
+  Yoyodyne, Inc., hereby disclaims all copyright interest in the program
+  `Gnomovision' (which makes passes at compilers) written by James Hacker.
+
+  <signature of Ty Coon>, 1 April 1989
+  Ty Coon, President of Vice
+
+This General Public License does not permit incorporating your program into
+proprietary programs.  If your program is a subroutine library, you may
+consider it more useful to permit linking proprietary applications with the
+library.  If this is what you want to do, use the GNU Library General
+Public License instead of this License.
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/thirdparty/vcs-load-dirs/baz_load_dirs	Mon May 18 14:22:45 2009 +0200
@@ -0,0 +1,20 @@
+#!/usr/bin/python
+# Copyright (C) 2005-2007 John Goerzen
+# <jgoerzen@complete.org>
+#
+#    This program is free software; you can redistribute it and/or modify
+#    it under the terms of the GNU General Public License as published by
+#    the Free Software Foundation; either version 2 of the License, or
+#    (at your option) any later version.
+#
+#    This program is distributed in the hope that it will be useful,
+#    but WITHOUT ANY WARRANTY; without even the implied warranty of
+#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+#    GNU General Public License for more details.
+#
+#    You should have received a copy of the GNU General Public License
+#    along with this program; if not, write to the Free Software
+#    Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
+
+from vcs_support import init
+init.run("baz")
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/thirdparty/vcs-load-dirs/darcs_load_dirs	Mon May 18 14:22:45 2009 +0200
@@ -0,0 +1,25 @@
+#!/usr/bin/python
+
+### REMEMBER TO UPDATE VERSION IN vcs_support/init.py
+
+# Copyright (C) 2003-2007 John Goerzen
+# <jgoerzen@complete.org>
+#
+#    This program is free software; you can redistribute it and/or modify
+#    it under the terms of the GNU General Public License as published by
+#    the Free Software Foundation; either version 2 of the License, or
+#    (at your option) any later version.
+#
+#    This program is distributed in the hope that it will be useful,
+#    but WITHOUT ANY WARRANTY; without even the implied warranty of
+#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+#    GNU General Public License for more details.
+#
+#    You should have received a copy of the GNU General Public License
+#    along with this program; if not, write to the Free Software
+#    Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
+
+
+from vcs_support import init
+
+init.run("darcs")
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/thirdparty/vcs-load-dirs/debian/baz-load-dirs.install	Mon May 18 14:22:45 2009 +0200
@@ -0,0 +1,2 @@
+debian/tmp/usr/bin/baz_load_dirs usr/bin
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/thirdparty/vcs-load-dirs/debian/baz-load-dirs.manpages	Mon May 18 14:22:45 2009 +0200
@@ -0,0 +1,1 @@
+docs/baz_load_dirs.1
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/thirdparty/vcs-load-dirs/debian/changelog	Mon May 18 14:22:45 2009 +0200
@@ -0,0 +1,315 @@
+vcs-load-dirs (1.1.6) unstable; urgency=low
+
+  * Added Vcs-* and Homepage control lines.
+
+ -- John Goerzen <jgoerzen@complete.org>  Sat, 01 Mar 2008 20:49:13 -0600
+
+vcs-load-dirs (1.1.5) unstable; urgency=low
+
+  * Better git rm handling.  Patch from agx@sigxcpu.org.
+    Closes: #417307.
+  * Convert changelog to UTF-8.  Closes: #453970.  (And the merged
+    bugs from the other binary debs generated from this package.)
+
+ -- John Goerzen <jgoerzen@complete.org>  Sun, 10 Feb 2008 07:52:54 -0600
+
+vcs-load-dirs (1.1.4) unstable; urgency=low
+
+  * Better documented -f
+  * Put Mercurial log where it won't get accidentally checked in
+  * Unify logfile generation across VCSs
+
+ -- John Goerzen <jgoerzen@complete.org>  Sat,  3 Mar 2007 08:12:28 -0600
+
+vcs-load-dirs (1.1.3) unstable; urgency=low
+
+  * -f now suppresses the interactive interface; it isn't necessary
+    in that mode.
+
+ -- John Goerzen <jgoerzen@complete.org>  Wed, 28 Feb 2007 06:07:56 -0600
+
+vcs-load-dirs (1.1.2) unstable; urgency=low
+
+  * Can now import directly from tar.gz, tar.bz2, and zip archives.
+    Closes: #235240.
+  * Guido's patch for git summary was already applied.  Closes: #403505.
+  * Fixed bug with Mercurial renames.
+  * New option -f to request no changes other than commits to the VCS.
+  * Documented -n and -f in the manpages.
+
+ -- John Goerzen <jgoerzen@complete.org>  Wed, 28 Feb 2007 05:32:53 -0600
+
+vcs-load-dirs (1.1.1) unstable; urgency=low
+
+  * Fixed Mercurial log summary recording.
+  * Noted program's new homepage.
+
+ -- John Goerzen <jgoerzen@complete.org>  Tue, 27 Feb 2007 03:04:54 -0600
+
+vcs-load-dirs (1.1.0) unstable; urgency=low
+
+  * New version.
+  * Source renamed to vcs-load-dirs.
+
+ -- John Goerzen <jgoerzen@complete.org>  Mon, 26 Feb 2007 05:06:12 -0600
+
+tla-load-dirs (1.0.35) unstable; urgency=low
+
+  * Merge and ACK NMU 1.0.33.1.  Closes: #395128.
+
+ -- John Goerzen <jgoerzen@complete.org>  Wed, 22 Nov 2006 05:07:41 -0600
+
+tla-load-dirs (1.0.34) unstable; urgency=high
+
+  * Fix darcs support, which one of the earlier git patches broke.
+
+ -- John Goerzen <jgoerzen@complete.org>  Fri, 17 Nov 2006 06:16:14 -0600
+
+tla-load-dirs (1.0.33.1) unstable; urgency=low
+
+  * Non-maintainer upload.
+  * Make sure git-load-dirs actually gets installed (Closes: #395128)
+
+ -- Guido Guenther <agx@sigxcpu.org>  Thu,  9 Nov 2006 09:48:02 +0100
+
+tla-load-dirs (1.0.33) unstable; urgency=low
+
+  * Added support for git, thanks to a patch from Guido Guenther.
+    Closes: #387139.
+
+ -- John Goerzen <jgoerzen@complete.org>  Wed, 18 Oct 2006 04:00:47 -0500
+
+tla-load-dirs (1.0.32) unstable; urgency=low
+
+  * Now build baz-load-dirs again, since bazaar is no longer broken.
+    Closes: #390458.
+
+ -- John Goerzen <jgoerzen@complete.org>  Mon,  9 Oct 2006 07:02:03 -0500
+
+tla-load-dirs (1.0.31) unstable; urgency=high
+
+  * Update deps on load-dirs-common.  Closes: #385644.
+  * Update for new Python policy.  Closes: #380975.
+  * No longer build baz-load-dirs for Debian.  bazaar is not migrating
+    into testing, and is preventing this package from entering.
+  * Update standards-version to 3.7.2.
+
+ -- John Goerzen <jgoerzen@complete.org>  Mon,  4 Sep 2006 09:06:41 -0500
+
+tla-load-dirs (1.0.30) unstable; urgency=low
+
+  * Apply patch from Gregory Colpart to fix a typo in svk support.
+    Closes: #382291.
+
+ -- John Goerzen <jgoerzen@complete.org>  Thu, 10 Aug 2006 16:13:13 -0500
+
+tla-load-dirs (1.0.29) unstable; urgency=low
+
+  * Ack NMU.  Closes: #360730.
+  * Do import instead of commit when the current tree-version has no logs.
+    Patch from Lionel Elie Mamane.  Closes: #350234.
+
+ -- John Goerzen <jgoerzen@complete.org>  Sat, 17 Jun 2006 11:21:19 -0500
+
+tla-load-dirs (1.0.28-0.1) unstable; urgency=high
+
+  * Non-maintainer upload.
+  * Add commas in dependency fields (Closes: #360730).
+
+ -- Luk Claes <luk@debian.org>  Mon, 17 Apr 2006 11:12:32 +0200
+
+tla-load-dirs (1.0.28) unstable; urgency=low
+
+  * Applied patch from Teemu Ikonen to support baz.  Closes: #322622.
+
+ -- John Goerzen <jgoerzen@complete.org>  Tue,  6 Sep 2005 06:03:24 -0500
+
+tla-load-dirs (1.0.27) unstable; urgency=low
+
+  * Fixed docs for darcs_load_dirs and also fixed manpages to reflect
+    that upstream archive is now kept in darcs instead of tla.
+
+ -- John Goerzen <jgoerzen@complete.org>  Thu,  1 Sep 2005 06:18:46 -0500
+
+tla-load-dirs (1.0.26) unstable; urgency=low
+
+  * Applied patch from Marc Dequènes to support new tla 1.3 command syntax.
+    Closes: #319505.
+
+ -- John Goerzen <jgoerzen@complete.org>  Wed, 24 Aug 2005 06:26:53 -0500
+
+tla-load-dirs (1.0.25) unstable; urgency=low
+
+  * Fixed bug where moving files into a new directory could fail on darcs.
+
+ -- John Goerzen <jgoerzen@complete.org>  Wed, 17 Aug 2005 05:26:27 -0500
+
+tla-load-dirs (1.0.24) unstable; urgency=low
+
+  * Now use darcs record -l instead of darcs add for adds.
+
+ -- John Goerzen <jgoerzen@complete.org>  Tue, 21 Jun 2005 06:47:27 -0500
+
+tla-load-dirs (1.0.23) unstable; urgency=low
+
+  * Missed a change to tla_wc.py that resulted because of the changes in
+    1.0.22.  Thanks James Vega for the helpful bug report.  Closes: #311582.
+
+ -- John Goerzen <jgoerzen@complete.org>  Wed,  1 Jun 2005 21:20:09 -0500
+
+tla-load-dirs (1.0.22) unstable; urgency=low
+
+  * Add --case-ok to darcs command line and sort filenames before adding.
+    Closes: #311041.
+
+ -- John Goerzen <jgoerzen@complete.org>  Tue, 31 May 2005 06:00:45 -0500
+
+tla-load-dirs (1.0.21) unstable; urgency=low
+
+  * No longer run "darcs check".
+
+ -- John Goerzen <jgoerzen@complete.org>  Sat, 14 May 2005 12:09:53 -0500
+
+tla-load-dirs (1.0.20) unstable; urgency=low
+
+  * Fixed rename problem caused by the fix in 1.0.19.
+
+ -- John Goerzen <jgoerzen@complete.org>  Mon, 25 Apr 2005 06:09:22 -0500
+
+tla-load-dirs (1.0.19) unstable; urgency=low
+
+  * [darcs] Fixed moving a file to a directory whose parent didn't yet exist.
+    Thanks to Matt Krai for helpful debugging information.  Closes: #293088.
+
+ -- John Goerzen <jgoerzen@complete.org>  Wed, 20 Apr 2005 22:04:15 -0500
+
+tla-load-dirs (1.0.18) unstable; urgency=low
+
+  * Fixed typos in control.  Closes: #300420.
+
+ -- John Goerzen <jgoerzen@complete.org>  Tue, 19 Apr 2005 23:56:29 -0500
+
+tla-load-dirs (1.0.17) unstable; urgency=low
+
+  * [darcs] No longer attempt to darcs remove files; darcs will detect
+    the deletion automatically.
+
+ -- John Goerzen <jgoerzen@complete.org>  Mon, 18 Apr 2005 20:16:14 -0500
+
+tla-load-dirs (1.0.16) unstable; urgency=low
+
+  * Added patch from Alfred M. Szmidt to support symlinks.
+
+ -- John Goerzen <jgoerzen@complete.org>  Tue,  1 Mar 2005 14:57:02 -0600
+
+tla-load-dirs (1.0.15) unstable; urgency=low
+
+  * Made load-dirs-common conflict with tla-load-dirs (<< 1.0.14).
+    Some files in earlier tla-load-dirs packages are now in
+    load-dirs-common.  Closes: #278102.
+
+ -- John Goerzen <jgoerzen@complete.org>  Sun, 24 Oct 2004 17:02:04 -0500
+
+tla-load-dirs (1.0.14) unstable; urgency=low
+
+  * Added darcs_load_dirs.
+
+ -- John Goerzen <jgoerzen@complete.org>  Wed, 13 Oct 2004 21:01:04 -0500
+
+tla-load-dirs (1.0.13) unstable; urgency=low
+
+  * Added python to build-deps.  Closes: #259136.
+
+ -- John Goerzen <jgoerzen@complete.org>  Tue, 13 Jul 2004 10:17:04 -0500
+
+tla-load-dirs (1.0.12) unstable; urgency=low
+
+  * Experimental: Let the tagging method be tagline.
+
+ -- John Goerzen <jgoerzen@complete.org>  Thu, 26 Feb 2004 22:00:07 -0600
+
+tla-load-dirs (1.0.11) unstable; urgency=low
+
+  * Added "upstream" changelog (from tla), note it in debian/rules,
+    and build-dep on a version of debhelper that can support it.
+  * Fixed tla version detection to work with the new "candidate" versions
+    in Debian sid.
+
+ -- John Goerzen <jgoerzen@complete.org>  Tue, 16 Dec 2003 16:16:46 -0600
+
+tla-load-dirs (1.0.10) unstable; urgency=low
+
+  * Added a manpage and description of the program's operation.
+    Closes: #214097.
+  * control s/Build-Depends/Build-Depends-Indep/
+
+ -- John Goerzen <jgoerzen@complete.org>  Wed, 22 Oct 2003 16:55:19 -0500
+
+tla-load-dirs (1.0.9) unstable; urgency=low
+
+  * Fixed a typo in tla_wc.
+
+ -- John Goerzen <jgoerzen@complete.org>  Wed, 22 Oct 2003 13:52:37 -0500
+
+tla-load-dirs (1.0.8) unstable; urgency=low
+
+  * Now compatible with both 1.0 and the latest 1.1 snapshot.
+
+ -- John Goerzen <jgoerzen@complete.org>  Wed, 22 Oct 2003 13:27:54 -0500
+
+tla-load-dirs (1.0.7) unstable; urgency=low
+
+  * Various bugfixes.
+  * Added -s option to set the summary line.
+
+ -- John Goerzen <jgoerzen@complete.org>  Fri,  5 Sep 2003 09:43:03 -0500
+
+tla-load-dirs (1.0.6) unstable; urgency=low
+
+  * Made the verbose stuff actually do something more verbose.
+
+ -- John Goerzen <jgoerzen@complete.org>  Thu,  4 Sep 2003 09:36:06 -0500
+
+tla-load-dirs (1.0.5) unstable; urgency=low
+
+  * Slight logic error when adding multiple nested directories.
+    Now do the adds after the copyfrom().
+
+ -- John Goerzen <jgoerzen@complete.org>  Wed,  3 Sep 2003 22:04:25 -0500
+
+tla-load-dirs (1.0.4) unstable; urgency=low
+
+  * Added better error checking of input.
+  * Added command to redraw screen.
+  * Many bugfixes.
+
+ -- John Goerzen <jgoerzen@complete.org>  Wed,  3 Sep 2003 15:09:18 -0500
+
+tla-load-dirs (1.0.3) unstable; urgency=low
+
+  * Now properly rm -rf's a directory that is removed.
+
+ -- John Goerzen <jgoerzen@complete.org>  Wed,  3 Sep 2003 14:32:39 -0500
+
+tla-load-dirs (1.0.2) unstable; urgency=low
+
+  * Fixed a small syntax error in tla_wc.py.
+
+ -- John Goerzen <jgoerzen@complete.org>  Wed,  3 Sep 2003 14:04:21 -0500
+
+tla-load-dirs (1.0.1) unstable; urgency=low
+
+  * New arg: --no-commit.
+  * Now strips off the path from the logged import directory.
+  * More aggressive clean target.
+  * Creates any directories necessary to move files into.
+  * Cleaned up output.
+
+ -- John Goerzen <jgoerzen@complete.org>  Wed,  3 Sep 2003 11:43:12 -0500
+
+tla-load-dirs (1.0.0) unstable; urgency=low
+
+  * Initial Release.  Closes: #208463.
+
+ -- John Goerzen <jgoerzen@complete.org>  Tue,  2 Sep 2003 20:10:45 -0500
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/thirdparty/vcs-load-dirs/debian/compat	Mon May 18 14:22:45 2009 +0200
@@ -0,0 +1,1 @@
+4
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/thirdparty/vcs-load-dirs/debian/control	Mon May 18 14:22:45 2009 +0200
@@ -0,0 +1,99 @@
+Source: vcs-load-dirs
+Section: devel
+Priority: optional
+Maintainer: John Goerzen <jgoerzen@complete.org>
+XS-Python-Version: current
+Build-Depends-Indep: debhelper (>= 5.0.37.2), gtk-doc-tools, sgml2x, docbook-utils, jade, python (>= 2.3), python-dev, python-central (>= 0.5)
+Standards-Version: 3.7.2
+Homepage: http://software.complete.org/vcs-load-dirs
+Vcs-Git: git://git.complete.org/vcs-load-dirs
+Vcs-Browser: http://git.complete.org/vcs-load-dirs
+
+Package: tla-load-dirs
+Architecture: all
+XB-Python-Version: ${python:Versions}
+Depends: load-dirs-common (= ${Source-Version}), tla, ${shlibs:Depends}, ${misc:Depends}, ${python:Depends}
+Description: Import upstream archives into tla/arch
+ Tom Lord's arch/tla system is capable of importing upstream directories
+ into a tla archive for some archives.  For situations where the upstream
+ renames or moves files and directories on a regular basis, version
+ information can be lost.
+ .
+ tla-load-dirs works with tla to import these things and preserve
+ changes.  It works on a principle similar to Subversion's
+ svn_load_dirs.
+
+Package: darcs-load-dirs
+Architecture: all
+XB-Python-Version: ${python:Versions}
+Depends: load-dirs-common (= ${Source-Version}), darcs, ${shlibs:Depends}, ${misc:Depends}, ${python:Depends}
+Description: Import upstream archives into darcs
+ Darcs works fine for importing new archives.  However, for situations
+ where the upstream renames or moves files and directories on a regular
+ basis, version information can be lost.
+ .
+ darcs-load-dirs works with darcs to import these things and preserve
+ changes.  It works on a principle similar to Subversion's
+ svn_load_dirs or my tla_load_dirs package for Arch.
+
+Package: hg-load-dirs
+Architecture: all
+XB-Python-Version: ${python:Versions}
+Depends: load-dirs-common (= ${Source-Version}), mercurial, ${shlibs:Depends}, ${misc:Depends}, ${python:Depends}
+Description: Import upstream archives into hg
+ Mercurial works fine for importing new archives.  However, for situations
+ where the upstream renames or moves files and directories on a regular
+ basis, version information can be lost.
+ .
+ hg-load-dirs works with hg to import these things and preserve
+ changes.  It works on a principle similar to Subversion's
+ svn_load_dirs or my tla_load_dirs package for Arch.
+
+Package: baz-load-dirs
+Architecture: all
+XB-Python-Version: ${python:Versions}
+Depends: load-dirs-common (= ${Source-Version}), bazaar, ${shlibs:Depends}, ${misc:Depends}, ${python:Depends}
+Description: Import upstream archives into baz
+ Bazaar (baz) works fine for importing new archives.  However, for situations
+ where the upstream renames or moves files and directories on a regular
+ basis, version information can be lost.
+ .
+ baz-load-dirs works with baz to import these things and preserve
+ changes.  It works on a principle similar to Subversion's
+ svn_load_dirs or my tla_load_dirs package for tla.
+
+Package: svk-load-dirs
+Architecture: all
+XB-Python-Version: ${python:Versions}
+Depends: load-dirs-common (= ${Source-Version}), svk, ${shlibs:Depends}, ${misc:Depends}, ${python:Depends}
+Description: Import upstream archives into svk
+ Svk works fine for importing new archives.  However, for situations
+ where the upstream renames or moves files and directories on a regular
+ basis, version information can be lost.
+ .
+ svk-load-dirs works with svk to import these things and preserve
+ changes.  It works on a principle similar to Subversion's
+ svn_load_dirs or my tla_load_dirs package for tla.
+
+Package: git-load-dirs
+Architecture: all
+XB-Python-Version: ${python:Versions}
+Depends: load-dirs-common (= ${Source-Version}), git-core, ${shlibs:Depends}, ${misc:Depends}, ${python:Depends}
+Description: Import upstream archives into git
+ Git works fine for importing new archives.  However, for situations
+ where the upstream renames or moves files and directories on a regular
+ basis, version information can be lost.
+ .
+ git-load-dirs works with git to import these things and preserve
+ changes.  It works on a principle similar to Subversion's
+ svn_load_dirs or my tla_load_dirs package for tla.
+
+Package: load-dirs-common
+Architecture: all
+XB-Python-Version: ${python:Versions}
+Depends: ${shlibs:Depends}, ${misc:Depends}, ${python:Depends}
+Conflicts: tla-load-dirs (<< 1.0.14)
+Description: Common files for tla-load-dirs and darcs-load-dirs
+ These two packages share much common code.  This package provides
+ that common code so you don't have to install it twice on your system.
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/thirdparty/vcs-load-dirs/debian/copyright	Mon May 18 14:22:45 2009 +0200
@@ -0,0 +1,26 @@
+This package was debianized by John Goerzen <jgoerzen@complete.org> on
+Tue,  2 Sep 2003 20:10:45 -0500.
+
+It was downloaded from the Arch repository at http://arch.complete.org/
+
+Upstream Author(s): John Goerzen <jgoerzen@complete.org>
+
+Copyright:
+
+# Copyright (C) 2003-2006 John Goerzen
+#
+#    This program is free software; you can redistribute it and/or modify
+#    it under the terms of the GNU General Public License as published by
+#    the Free Software Foundation; either version 2 of the License, or
+#    (at your option) any later version.
+#
+#    This program is distributed in the hope that it will be useful,
+#    but WITHOUT ANY WARRANTY; without even the implied warranty of
+#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+#    GNU General Public License for more details.
+#
+#    You should have received a copy of the GNU General Public License
+#    along with this program; if not, write to the Free Software
+#    Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
+
+You may find the GPL at /usr/share/common-licenses/GPL-2
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/thirdparty/vcs-load-dirs/debian/darcs-load-dirs.install	Mon May 18 14:22:45 2009 +0200
@@ -0,0 +1,2 @@
+debian/tmp/usr/bin/darcs_load_dirs usr/bin
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/thirdparty/vcs-load-dirs/debian/darcs-load-dirs.manpages	Mon May 18 14:22:45 2009 +0200
@@ -0,0 +1,1 @@
+docs/darcs_load_dirs.1
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/thirdparty/vcs-load-dirs/debian/dirs	Mon May 18 14:22:45 2009 +0200
@@ -0,0 +1,2 @@
+usr/bin
+usr/lib
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/thirdparty/vcs-load-dirs/debian/git-load-dirs.install	Mon May 18 14:22:45 2009 +0200
@@ -0,0 +1,2 @@
+debian/tmp/usr/bin/git_load_dirs usr/bin
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/thirdparty/vcs-load-dirs/debian/git-load-dirs.manpages	Mon May 18 14:22:45 2009 +0200
@@ -0,0 +1,1 @@
+docs/git_load_dirs.1
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/thirdparty/vcs-load-dirs/debian/hg-load-dirs.install	Mon May 18 14:22:45 2009 +0200
@@ -0,0 +1,2 @@
+debian/tmp/usr/bin/hg_load_dirs usr/bin
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/thirdparty/vcs-load-dirs/debian/hg-load-dirs.manpages	Mon May 18 14:22:45 2009 +0200
@@ -0,0 +1,1 @@
+docs/hg_load_dirs.1
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/thirdparty/vcs-load-dirs/debian/load-dirs-common.install	Mon May 18 14:22:45 2009 +0200
@@ -0,0 +1,2 @@
+debian/tmp/usr/lib usr
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/thirdparty/vcs-load-dirs/debian/pycompat	Mon May 18 14:22:45 2009 +0200
@@ -0,0 +1,1 @@
+2
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/thirdparty/vcs-load-dirs/debian/rules	Mon May 18 14:22:45 2009 +0200
@@ -0,0 +1,104 @@
+#!/usr/bin/make -f
+# Sample debian/rules that uses debhelper.
+# GNU copyright 1997 to 1999 by Joey Hess.
+
+# Uncomment this to turn on verbose mode.
+#export DH_VERBOSE=1
+
+PYTHON=python
+PACKAGE=vcs-load-dirs
+
+
+CFLAGS = -Wall -g
+
+ifneq (,$(findstring noopt,$(DEB_BUILD_OPTIONS)))
+	CFLAGS += -O0
+else
+	CFLAGS += -O2
+endif
+ifeq (,$(findstring nostrip,$(DEB_BUILD_OPTIONS)))
+	INSTALL_PROGRAM += -s
+endif
+
+configure: configure-stamp
+configure-stamp:
+	dh_testdir
+	# Add here commands to configure the package.
+
+	touch configure-stamp
+
+
+build: build-stamp
+
+build-stamp: configure-stamp 
+	dh_testdir
+
+	# Add here commands to compile the package.
+	#/usr/bin/docbook-to-man debian/tla-load-dirs.sgml > tla-load-dirs.1
+	cd docs && make manpages
+	$(PYTHON) setup.py build
+	touch build-stamp
+
+clean:
+	dh_testdir
+	dh_testroot
+	rm -f build-stamp configure-stamp
+
+	# Add here commands to clean up after the build process.
+	#-$(MAKE) clean
+	-$(PYTHON) setup.py clean
+	-rm -rf ./++* ./,,*
+	-rm -rf build
+	-cd docs; make realclean
+	dh_clean
+
+install: build
+	dh_testdir
+	dh_testroot
+	dh_clean -k
+	dh_installdirs
+
+	# Add here commands to install the package into debian/tla-load-dirs.
+	#$(MAKE) install DESTDIR=$(CURDIR)/debian/tla-load-dirs
+	mkdir debian/tmp
+	$(PYTHON) setup.py install --root=`pwd`/debian/tmp \
+		--no-compile
+
+
+binary-arch: build install
+# We have nothing to do by default.
+
+# Build architecture-dependent files here.
+binary-indep: build install
+	dh_testdir
+	dh_testroot
+	dh_installchangelogs -i
+	dh_installdocs -i
+	dh_installexamples -i
+	dh_install -i
+#	dh_installmenu
+#	dh_installdebconf	
+#	dh_installlogrotate
+#	dh_installemacsen
+#	dh_installpam
+#	dh_installmime
+#	dh_installinit
+#	dh_installcron
+#	dh_installinfo
+	dh_installman -i
+	dh_link -i
+	dh_strip -i
+	dh_compress -i
+	dh_fixperms -i
+#	dh_perl
+	dh_pycentral
+	dh_python -i
+#	dh_makeshlibs
+	dh_installdeb -i
+	dh_shlibdeps -i
+	dh_gencontrol -i
+	dh_md5sums -i
+	dh_builddeb -i
+
+binary: binary-indep binary-arch
+.PHONY: build clean binary-indep binary-arch binary install configure
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/thirdparty/vcs-load-dirs/debian/svk-load-dirs.install	Mon May 18 14:22:45 2009 +0200
@@ -0,0 +1,2 @@
+debian/tmp/usr/bin/svk_load_dirs usr/bin
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/thirdparty/vcs-load-dirs/debian/svk-load-dirs.manpages	Mon May 18 14:22:45 2009 +0200
@@ -0,0 +1,1 @@
+docs/svk_load_dirs.1
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/thirdparty/vcs-load-dirs/debian/tla-load-dirs.install	Mon May 18 14:22:45 2009 +0200
@@ -0,0 +1,2 @@
+debian/tmp/usr/bin/tla_load_dirs usr/bin
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/thirdparty/vcs-load-dirs/debian/tla-load-dirs.manpages	Mon May 18 14:22:45 2009 +0200
@@ -0,0 +1,1 @@
+docs/tla_load_dirs.1
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/thirdparty/vcs-load-dirs/docs/Makefile	Mon May 18 14:22:45 2009 +0200
@@ -0,0 +1,62 @@
+# Copyright (C) 2003-2007 John Goerzen
+# <jgoerzen@complete.org>
+#
+#    This program is free software; you can redistribute it and/or modify
+#    it under the terms of the GNU General Public License as published by
+#    the Free Software Foundation; either version 2 of the License, or
+#    (at your option) any later version.
+#
+#    This program is distributed in the hope that it will be useful,
+#    but WITHOUT ANY WARRANTY; without even the implied warranty of
+#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+#    GNU General Public License for more details.
+#
+#    You should have received a copy of the GNU General Public License
+#    along with this program; if not, write to the Free Software
+#    Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
+
+MASTERBASE := vcs_load_dirs
+TOPNODE := vcs_load_dirs
+BASICDEPS := $(MASTERBASE).sgml
+#DOINDEX := yes
+DOINDEX := no
+HTMLARGS := -s local
+PSARGS := -s local=printlocal
+
+all: ps
+ps: $(MASTERBASE).ps
+html: html/index.html
+text: $(MASTERBASE).txt
+
+manpages: tla_load_dirs.1 darcs_load_dirs.1 baz_load_dirs.1 svk_load_dirs.1 git_load_dirs.1 hg_load_dirs.1
+%.1: %.sgml
+	docbook2man $<
+
+darcs_load_dirs.1: vcs_load_dirs.1
+	sed -e s/vcs_load_dirs/darcs_load_dirs/g \
+	    -e s/VCS_LOAD_DIRS/DARCS_LOAD_DIRS/g < $< > $@
+
+hg_load_dirs.1: vcs_load_dirs.1
+	sed -e s/vcs_load_dirs/hg_load_dirs/g \
+	    -e s/VCS_LOAD_DIRS/HG_LOAD_DIRS/g < $< > $@
+
+tla_load_dirs.1: vcs_load_dirs.1
+	sed -e s/vcs_load_dirs/tla_load_dirs/g \
+	    -e s/VCS_LOAD_DIRS/TLA_LOAD_DIRS/g < $< > $@
+
+baz_load_dirs.1: vcs_load_dirs.1
+	sed -e s/vcs_load_dirs/baz_load_dirs/g \
+	    -e s/VCS_LOAD_DIRS/BAZ_LOAD_DIRS/g < $< > $@
+
+svk_load_dirs.1: vcs_load_dirs.1
+	sed -e s/vcs_load_dirs/svk_load_dirs/g \
+		-e s/VCS_LOAD_DIRS/SVK_LOAD_DIRS/g < $< > $@
+
+git_load_dirs.1: vcs_load_dirs.1
+	sed -e s/vcs_load_dirs/git_load_dirs/g \
+		-e s/VCS_LOAD_DIRS/GIT_LOAD_DIRS/g < $< > $@
+
+include sgml-common/Makefile.common
+
+realclean: clean
+	-rm -rf *.links *.refs *.1
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/thirdparty/vcs-load-dirs/docs/sgml-common/COPYING	Mon May 18 14:22:45 2009 +0200
@@ -0,0 +1,342 @@
+		    GNU GENERAL PUBLIC LICENSE
+		       Version 2, June 1991
+
+ Copyright (C) 1989, 1991 Free Software Foundation, Inc.
+     59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
+ Everyone is permitted to copy and distribute verbatim copies
+ of this license document, but changing it is not allowed.
+
+			    Preamble
+
+  The licenses for most software are designed to take away your
+freedom to share and change it.  By contrast, the GNU General Public
+License is intended to guarantee your freedom to share and change free
+software--to make sure the software is free for all its users.  This
+General Public License applies to most of the Free Software
+Foundation's software and to any other program whose authors commit to
+using it.  (Some other Free Software Foundation software is covered by
+the GNU Library General Public License instead.)  You can apply it to
+your programs, too.
+
+  When we speak of free software, we are referring to freedom, not
+price.  Our General Public Licenses are designed to make sure that you
+have the freedom to distribute copies of free software (and charge for
+this service if you wish), that you receive source code or can get it
+if you want it, that you can change the software or use pieces of it
+in new free programs; and that you know you can do these things.
+
+  To protect your rights, we need to make restrictions that forbid
+anyone to deny you these rights or to ask you to surrender the rights.
+These restrictions translate to certain responsibilities for you if you
+distribute copies of the software, or if you modify it.
+
+  For example, if you distribute copies of such a program, whether
+gratis or for a fee, you must give the recipients all the rights that
+you have.  You must make sure that they, too, receive or can get the
+source code.  And you must show them these terms so they know their
+rights.
+
+  We protect your rights with two steps: (1) copyright the software, and
+(2) offer you this license which gives you legal permission to copy,
+distribute and/or modify the software.
+
+  Also, for each author's protection and ours, we want to make certain
+that everyone understands that there is no warranty for this free
+software.  If the software is modified by someone else and passed on, we
+want its recipients to know that what they have is not the original, so
+that any problems introduced by others will not reflect on the original
+authors' reputations.
+
+  Finally, any free program is threatened constantly by software
+patents.  We wish to avoid the danger that redistributors of a free
+program will individually obtain patent licenses, in effect making the
+program proprietary.  To prevent this, we have made it clear that any
+patent must be licensed for everyone's free use or not licensed at all.
+
+  The precise terms and conditions for copying, distribution and
+modification follow.
+
+		    GNU GENERAL PUBLIC LICENSE
+   TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION
+
+  0. This License applies to any program or other work which contains
+a notice placed by the copyright holder saying it may be distributed
+under the terms of this General Public License.  The "Program", below,
+refers to any such program or work, and a "work based on the Program"
+means either the Program or any derivative work under copyright law:
+that is to say, a work containing the Program or a portion of it,
+either verbatim or with modifications and/or translated into another
+language.  (Hereinafter, translation is included without limitation in
+the term "modification".)  Each licensee is addressed as "you".
+
+Activities other than copying, distribution and modification are not
+covered by this License; they are outside its scope.  The act of
+running the Program is not restricted, and the output from the Program
+is covered only if its contents constitute a work based on the
+Program (independent of having been made by running the Program).
+Whether that is true depends on what the Program does.
+
+  1. You may copy and distribute verbatim copies of the Program's
+source code as you receive it, in any medium, provided that you
+conspicuously and appropriately publish on each copy an appropriate
+copyright notice and disclaimer of warranty; keep intact all the
+notices that refer to this License and to the absence of any warranty;
+and give any other recipients of the Program a copy of this License
+along with the Program.
+
+You may charge a fee for the physical act of transferring a copy, and
+you may at your option offer warranty protection in exchange for a fee.
+
+  2. You may modify your copy or copies of the Program or any portion
+of it, thus forming a work based on the Program, and copy and
+distribute such modifications or work under the terms of Section 1
+above, provided that you also meet all of these conditions:
+
+    a) You must cause the modified files to carry prominent notices
+    stating that you changed the files and the date of any change.
+
+    b) You must cause any work that you distribute or publish, that in
+    whole or in part contains or is derived from the Program or any
+    part thereof, to be licensed as a whole at no charge to all third
+    parties under the terms of this License.
+
+    c) If the modified program normally reads commands interactively
+    when run, you must cause it, when started running for such
+    interactive use in the most ordinary way, to print or display an
+    announcement including an appropriate copyright notice and a
+    notice that there is no warranty (or else, saying that you provide
+    a warranty) and that users may redistribute the program under
+    these conditions, and telling the user how to view a copy of this
+    License.  (Exception: if the Program itself is interactive but
+    does not normally print such an announcement, your work based on
+    the Program is not required to print an announcement.)
+
+These requirements apply to the modified work as a whole.  If
+identifiable sections of that work are not derived from the Program,
+and can be reasonably considered independent and separate works in
+themselves, then this License, and its terms, do not apply to those
+sections when you distribute them as separate works.  But when you
+distribute the same sections as part of a whole which is a work based
+on the Program, the distribution of the whole must be on the terms of
+this License, whose permissions for other licensees extend to the
+entire whole, and thus to each and every part regardless of who wrote it.
+
+Thus, it is not the intent of this section to claim rights or contest
+your rights to work written entirely by you; rather, the intent is to
+exercise the right to control the distribution of derivative or
+collective works based on the Program.
+
+In addition, mere aggregation of another work not based on the Program
+with the Program (or with a work based on the Program) on a volume of
+a storage or distribution medium does not bring the other work under
+the scope of this License.
+
+  3. You may copy and distribute the Program (or a work based on it,
+under Section 2) in object code or executable form under the terms of
+Sections 1 and 2 above provided that you also do one of the following:
+
+    a) Accompany it with the complete corresponding machine-readable
+    source code, which must be distributed under the terms of Sections
+    1 and 2 above on a medium customarily used for software interchange; or,
+
+    b) Accompany it with a written offer, valid for at least three
+    years, to give any third party, for a charge no more than your
+    cost of physically performing source distribution, a complete
+    machine-readable copy of the corresponding source code, to be
+    distributed under the terms of Sections 1 and 2 above on a medium
+    customarily used for software interchange; or,
+
+    c) Accompany it with the information you received as to the offer
+    to distribute corresponding source code.  (This alternative is
+    allowed only for noncommercial distribution and only if you
+    received the program in object code or executable form with such
+    an offer, in accord with Subsection b above.)
+
+The source code for a work means the preferred form of the work for
+making modifications to it.  For an executable work, complete source
+code means all the source code for all modules it contains, plus any
+associated interface definition files, plus the scripts used to
+control compilation and installation of the executable.  However, as a
+special exception, the source code distributed need not include
+anything that is normally distributed (in either source or binary
+form) with the major components (compiler, kernel, and so on) of the
+operating system on which the executable runs, unless that component
+itself accompanies the executable.
+
+If distribution of executable or object code is made by offering
+access to copy from a designated place, then offering equivalent
+access to copy the source code from the same place counts as
+distribution of the source code, even though third parties are not
+compelled to copy the source along with the object code.
+
+  4. You may not copy, modify, sublicense, or distribute the Program
+except as expressly provided under this License.  Any attempt
+otherwise to copy, modify, sublicense or distribute the Program is
+void, and will automatically terminate your rights under this License.
+However, parties who have received copies, or rights, from you under
+this License will not have their licenses terminated so long as such
+parties remain in full compliance.
+
+  5. You are not required to accept this License, since you have not
+signed it.  However, nothing else grants you permission to modify or
+distribute the Program or its derivative works.  These actions are
+prohibited by law if you do not accept this License.  Therefore, by
+modifying or distributing the Program (or any work based on the
+Program), you indicate your acceptance of this License to do so, and
+all its terms and conditions for copying, distributing or modifying
+the Program or works based on it.
+
+  6. Each time you redistribute the Program (or any work based on the
+Program), the recipient automatically receives a license from the
+original licensor to copy, distribute or modify the Program subject to
+these terms and conditions.  You may not impose any further
+restrictions on the recipients' exercise of the rights granted herein.
+You are not responsible for enforcing compliance by third parties to
+this License.
+
+  7. If, as a consequence of a court judgment or allegation of patent
+infringement or for any other reason (not limited to patent issues),
+conditions are imposed on you (whether by court order, agreement or
+otherwise) that contradict the conditions of this License, they do not
+excuse you from the conditions of this License.  If you cannot
+distribute so as to satisfy simultaneously your obligations under this
+License and any other pertinent obligations, then as a consequence you
+may not distribute the Program at all.  For example, if a patent
+license would not permit royalty-free redistribution of the Program by
+all those who receive copies directly or indirectly through you, then
+the only way you could satisfy both it and this License would be to
+refrain entirely from distribution of the Program.
+
+If any portion of this section is held invalid or unenforceable under
+any particular circumstance, the balance of the section is intended to
+apply and the section as a whole is intended to apply in other
+circumstances.
+
+It is not the purpose of this section to induce you to infringe any
+patents or other property right claims or to contest validity of any
+such claims; this section has the sole purpose of protecting the
+integrity of the free software distribution system, which is
+implemented by public license practices.  Many people have made
+generous contributions to the wide range of software distributed
+through that system in reliance on consistent application of that
+system; it is up to the author/donor to decide if he or she is willing
+to distribute software through any other system and a licensee cannot
+impose that choice.
+
+This section is intended to make thoroughly clear what is believed to
+be a consequence of the rest of this License.
+
+  8. If the distribution and/or use of the Program is restricted in
+certain countries either by patents or by copyrighted interfaces, the
+original copyright holder who places the Program under this License
+may add an explicit geographical distribution limitation excluding
+those countries, so that distribution is permitted only in or among
+countries not thus excluded.  In such case, this License incorporates
+the limitation as if written in the body of this License.
+
+  9. The Free Software Foundation may publish revised and/or new versions
+of the General Public License from time to time.  Such new versions will
+be similar in spirit to the present version, but may differ in detail to
+address new problems or concerns.
+
+Each version is given a distinguishing version number.  If the Program
+specifies a version number of this License which applies to it and "any
+later version", you have the option of following the terms and conditions
+either of that version or of any later version published by the Free
+Software Foundation.  If the Program does not specify a version number of
+this License, you may choose any version ever published by the Free Software
+Foundation.
+
+  10. If you wish to incorporate parts of the Program into other free
+programs whose distribution conditions are different, write to the author
+to ask for permission.  For software which is copyrighted by the Free
+Software Foundation, write to the Free Software Foundation; we sometimes
+make exceptions for this.  Our decision will be guided by the two goals
+of preserving the free status of all derivatives of our free software and
+of promoting the sharing and reuse of software generally.
+
+			    NO WARRANTY
+
+  11. BECAUSE THE PROGRAM IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY
+FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW.  EXCEPT WHEN
+OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES
+PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED
+OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.  THE ENTIRE RISK AS
+TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU.  SHOULD THE
+PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING,
+REPAIR OR CORRECTION.
+
+  12. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
+WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR
+REDISTRIBUTE THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES,
+INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING
+OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED
+TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY
+YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER
+PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE
+POSSIBILITY OF SUCH DAMAGES.
+
+		     END OF TERMS AND CONDITIONS
+
+	    How to Apply These Terms to Your New Programs
+
+  If you develop a new program, and you want it to be of the greatest
+possible use to the public, the best way to achieve this is to make it
+free software which everyone can redistribute and change under these terms.
+
+  To do so, attach the following notices to the program.  It is safest
+to attach them to the start of each source file to most effectively
+convey the exclusion of warranty; and each file should have at least
+the "copyright" line and a pointer to where the full notice is found.
+
+    <one line to give the program's name and a brief idea of what it does.>
+    Copyright (C) <year>  <name of author>
+
+    This program is free software; you can redistribute it and/or modify
+    it under the terms of the GNU General Public License as published by
+    the Free Software Foundation; either version 2 of the License, or
+    (at your option) any later version.
+
+    This program is distributed in the hope that it will be useful,
+    but WITHOUT ANY WARRANTY; without even the implied warranty of
+    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+    GNU General Public License for more details.
+
+    You should have received a copy of the GNU General Public License
+    along with this program; if not, write to the Free Software
+    Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
+
+
+Also add information on how to contact you by electronic and paper mail.
+
+If the program is interactive, make it output a short notice like this
+when it starts in an interactive mode:
+
+    Gnomovision version 69, Copyright (C) year  name of author
+    Gnomovision comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
+    This is free software, and you are welcome to redistribute it
+    under certain conditions; type `show c' for details.
+
+The hypothetical commands `show w' and `show c' should show the appropriate
+parts of the General Public License.  Of course, the commands you use may
+be called something other than `show w' and `show c'; they could even be
+mouse-clicks or menu items--whatever suits your program.
+
+You should also get your employer (if you work as a programmer) or your
+school, if any, to sign a "copyright disclaimer" for the program, if
+necessary.  Here is a sample; alter the names:
+
+  Yoyodyne, Inc., hereby disclaims all copyright interest in the program
+  `Gnomovision' (which makes passes at compilers) written by James Hacker.
+
+  <signature of Ty Coon>, 1 April 1989
+  Ty Coon, President of Vice
+
+This General Public License does not permit incorporating your program into
+proprietary programs.  If your program is a subroutine library, you may
+consider it more useful to permit linking proprietary applications with the
+library.  If this is what you want to do, use the GNU Library General
+Public License instead of this License.
+
+# arch-tag: License for sgml-common
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/thirdparty/vcs-load-dirs/docs/sgml-common/COPYRIGHT	Mon May 18 14:22:45 2009 +0200
@@ -0,0 +1,39 @@
+Copyright for all code except ps2epsi
+-------------------------------------
+# Copyright (C) 2002, 2003 John Goerzen
+# <jgoerzen@complete.org>
+#
+#    This program is free software; you can redistribute it and/or modify
+#    it under the terms of the GNU General Public License as published by
+#    the Free Software Foundation; either version 2 of the License, or
+#    (at your option) any later version.
+#
+#    This program is distributed in the hope that it will be useful,
+#    but WITHOUT ANY WARRANTY; without even the implied warranty of
+#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+#    GNU General Public License for more details.
+#
+#    You should have received a copy of the GNU General Public License
+#    along with this program; if not, write to the Free Software
+#    Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
+
+Copyright for ps2epsi
+---------------------
+This program is free software; you can redistribute it and/or modify it
+under the terms of the GNU General Public License as published by the
+Free Software Foundation; either version 2, or (at your option) any
+later version.
+
+This program is distributed in the hope that it will be useful,
+but WITHOUT ANY WARRANTY; without even the implied warranty of
+MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+GNU General Public License for more details.
+
+You should have received a copy of the GNU General Public License
+along with this program; if not, write to the Free Software
+Foundation, 59 Temple Place - Suite 330,
+Boston, MA 02111-1307, USA.
+
+My local changes were to the page size.
+
+# arch-tag: Copyright statements and information for sgml-common
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/thirdparty/vcs-load-dirs/docs/sgml-common/ChangeLog	Mon May 18 14:22:45 2009 +0200
@@ -0,0 +1,265 @@
+# do not edit -- automatically generated by arch changelog
+# arch-tag: automatic-ChangeLog--jgoerzen@complete.org--projects/sgml-common--head--1.0
+#
+
+2004-05-21 14:28:00 GMT	John Goerzen <jgoerzen@complete.org>	patch-20
+
+    Summary:
+      Fixed text target
+    Revision:
+      sgml-common--head--1.0--patch-20
+
+
+    modified files:
+     ChangeLog SConstruct
+
+
+2004-02-27 15:22:59 GMT	John Goerzen <jgoerzen@complete.org>	patch-19
+
+    Summary:
+      Added manpage generation support
+    Revision:
+      sgml-common--head--1.0--patch-19
+
+
+    modified files:
+     ChangeLog SConstruct
+
+
+2004-02-03 19:50:11 GMT	John Goerzen <jgoerzen@complete.org>	patch-18
+
+    Summary:
+      More clearing up of REs
+    Revision:
+      sgml-common--head--1.0--patch-18
+
+
+    modified files:
+     ChangeLog SConstruct
+
+
+2004-02-03 19:40:22 GMT	John Goerzen <jgoerzen@complete.org>	patch-17
+
+    Summary:
+      Tightened up re for finding image tags
+    Revision:
+      sgml-common--head--1.0--patch-17
+
+
+    modified files:
+     ChangeLog SConstruct
+
+
+2004-02-03 19:28:03 GMT	John Goerzen <jgoerzen@complete.org>	patch-16
+
+    Summary:
+      More changes to support scanner
+    Revision:
+      sgml-common--head--1.0--patch-16
+
+    Removed outdated code and made more bugfixes relevant to documents without
+    images or with only pre-generated images.
+
+    new files:
+     ChangeLog
+
+    modified files:
+     SConstruct
+
+
+2004-02-03 18:41:51 GMT	John Goerzen <jgoerzen@complete.org>	patch-15
+
+    Summary:
+      HTML gen now basically functional with scanning
+    Revision:
+      sgml-common--head--1.0--patch-15
+
+
+    modified files:
+     SConstruct
+
+
+2004-02-03 17:41:51 GMT	John Goerzen <jgoerzen@complete.org>	patch-14
+
+    Summary:
+      Auto-scanning is now close for PDFs.
+    Revision:
+      sgml-common--head--1.0--patch-14
+
+
+    modified files:
+     SConstruct
+
+
+2004-02-03 16:59:02 GMT	John Goerzen <jgoerzen@complete.org>	patch-13
+
+    Summary:
+      Scanners starting to work
+    Revision:
+      sgml-common--head--1.0--patch-13
+
+
+    modified files:
+     SConstruct
+
+
+2004-02-03 16:22:51 GMT	John Goerzen <jgoerzen@complete.org>	patch-12
+
+    Summary:
+      Cleaned up HTML situation
+    Revision:
+      sgml-common--head--1.0--patch-12
+
+
+    modified files:
+     SConstruct
+
+
+2004-02-02 22:33:50 GMT	John Goerzen <jgoerzen@complete.org>	patch-11
+
+    Summary:
+      SConstruct file now working
+    Revision:
+      sgml-common--head--1.0--patch-11
+
+
+    modified files:
+     SConstruct
+
+
+2004-02-02 22:07:27 GMT	John Goerzen <jgoerzen@complete.org>	patch-10
+
+    Summary:
+      Fixed nasty PNG gen bug
+    Revision:
+      sgml-common--head--1.0--patch-10
+
+
+    modified files:
+     SConstruct
+
+
+2004-02-02 21:57:32 GMT	John Goerzen <jgoerzen@complete.org>	patch-9
+
+    Summary:
+      Checkpointing some more...
+    Revision:
+      sgml-common--head--1.0--patch-9
+
+
+    modified files:
+     SConstruct
+
+
+2004-02-02 21:37:29 GMT	John Goerzen <jgoerzen@complete.org>	patch-8
+
+    Summary:
+      Checkpointing some more...
+    Revision:
+      sgml-common--head--1.0--patch-8
+
+
+    modified files:
+     SConstruct
+
+
+2004-02-02 20:19:02 GMT	John Goerzen <jgoerzen@complete.org>	patch-7
+
+    Summary:
+      Checkpointing
+    Revision:
+      sgml-common--head--1.0--patch-7
+
+
+    modified files:
+     SConstruct
+
+
+2004-02-02 19:18:40 GMT	John Goerzen <jgoerzen@complete.org>	patch-6
+
+    Summary:
+      Added Plucker
+    Revision:
+      sgml-common--head--1.0--patch-6
+
+
+    modified files:
+     SConstruct
+
+
+2004-02-02 19:09:16 GMT	John Goerzen <jgoerzen@complete.org>	patch-5
+
+    Summary:
+      Added cleanup rules
+    Revision:
+      sgml-common--head--1.0--patch-5
+
+
+    modified files:
+     SConstruct
+
+
+2004-02-02 18:58:11 GMT	John Goerzen <jgoerzen@complete.org>	patch-4
+
+    Summary:
+      sources now checks only chapters/ to prevent dep cycle with index
+    Revision:
+      sgml-common--head--1.0--patch-4
+
+
+    modified files:
+     Makefile.common SConstruct {arch}/=tagging-method
+
+
+2004-02-02 17:51:27 GMT	John Goerzen <jgoerzen@complete.org>	patch-3
+
+    Summary:
+      Experimental SCons conversion
+    Revision:
+      sgml-common--head--1.0--patch-3
+
+
+    new files:
+     SConstruct
+
+
+2003-10-21 20:24:04 GMT	John Goerzen <jgoerzen@complete.org>	patch-2
+
+    Summary:
+      Added plain text generation target
+    Revision:
+      sgml-common--head--1.0--patch-2
+
+    Added plain text generation target
+    
+
+    modified files:
+     ./Makefile.common
+
+
+2003-09-10 14:27:58 GMT	John Goerzen <jgoerzen@complete.org>	patch-1
+
+    Summary:
+      Minor updates to PNG generation and gtk-doc icon locations
+    Revision:
+      sgml-common--head--1.0--patch-1
+
+
+    modified files:
+     Makefile.common
+
+
+2003-09-10 14:24:24 GMT	John Goerzen <jgoerzen@complete.org>	base-0
+
+    Summary:
+      initial import
+    Revision:
+      sgml-common--head--1.0--base-0
+
+    
+    (automatically generated log message)
+
+    new files:
+     COPYING COPYRIGHT Makefile.common ps2epsi
+
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/thirdparty/vcs-load-dirs/docs/sgml-common/Makefile.common	Mon May 18 14:22:45 2009 +0200
@@ -0,0 +1,229 @@
+# -*- Mode: makefile; -*-
+# arch-tag: Primary sgml-common top-level Makefile
+# Common Makefile for SGML documents
+#
+# Copyright (C) 2002, 2003 John Goerzen
+# <jgoerzen@complete.org>
+#
+#    This program is free software; you can redistribute it and/or modify
+#    it under the terms of the GNU General Public License as published by
+#    the Free Software Foundation; either version 2 of the License, or
+#    (at your option) any later version.
+#
+#    This program is distributed in the hope that it will be useful,
+#    but WITHOUT ANY WARRANTY; without even the implied warranty of
+#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+#    GNU General Public License for more details.
+#
+#    You should have received a copy of the GNU General Public License
+#    along with this program; if not, write to the Free Software
+#    Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
+
+# The following variables should be set:
+# MASTERBASE -- basename of master file -- example: my-guide
+# BASICDEPS -- various dependencies of the master file.  For instance,
+#   this might include files included in the SGML.  It could also be empty.
+# TOPNODE -- Basename of top id for HTML link.
+
+MASTER := $(MASTERBASE).sgml
+FIGUREDIRS := $(wildcard figures/*)
+DOINDEX ?= yes
+
+######################################################################
+# Index generation
+######################################################################
+
+ifeq ($(DOINDEX), yes)
+
+INDEXSGMLFILE := index/index.sgml
+INDEXDATAFILE := index/HTML.index
+
+$(INDEXSGMLFILE): $(INDEXDATAFILE)
+	@echo " *** Generating SGML index from index list"
+	collateindex.pl -i ch.index -g -o index/index.sgml index/HTML.index
+
+$(INDEXDATAFILE): $(MASTER) $(BASICDEPS)
+#	jade -t sgml -d docbook.dsl -V html-index $(MASTER)
+#	jade -t sgml -V html-index $(MASTER)
+	@echo " *** Generating index list from document"
+	-rm -r index
+	mkdir index
+	collateindex.pl -i ch.index -N -o index/index.sgml
+	#mkdir html-temp
+	#docbook2html --output html-temp -V html-index $(MASTER)
+	docbook-2-html -O -V -O html-index $(HTMLARGS) $(MASTER)
+	mv $(MASTERBASE)-html/HTML.index index/
+	rm -r $(MASTERBASE)-html
+endif # DOINDEX
+
+######################################################################
+# Text generation
+######################################################################
+$(MASTERBASE).txt: $(MASTER) $(BASICDEPS) $(INDEXSGMLFILE)
+	@echo " *** Generating text output"
+	docbook2txt $(MASTER)
+
+######################################################################
+# PostScript generation
+######################################################################
+
+$(MASTERBASE).ps: $(MASTER) $(BASICDEPS) $(INDEXSGMLFILE) $(EPSFILES)
+	@echo " *** Generating PostScript output"
+# This works too: docbook2ps -V paper-size=Letter $(MASTER)
+	docbook-2-ps -q -O -V -O paper-size=Letter $(PSARGS) $(MASTER)
+
+######################################################################
+# Figure generation
+######################################################################
+
+%_1.epi: %.ps
+	$(get-epi)
+%_2.epi: %.ps
+	$(get-epi)
+%_3.epi: %.ps
+	$(get-epi)
+%_4.epi: %.ps
+	$(get-epi)
+%_5.epi: %.ps
+	$(get-epi)
+%_6.epi: %.ps
+	$(get-epi)
+%_7.epi: %.ps
+	$(get-epi)
+%_8.epi: %.ps
+	$(get-epi)
+%_9.epi: %.ps
+	$(get-epi)
+%_10.epi: %.ps
+	$(get-epi)
+%_11.epi: %.ps
+	$(get-epi)
+%_12.epi: %.ps
+	$(get-epi)
+
+%_1_l.epi: %.ps
+	$(get-epil)
+%_2_l.epi: %.ps
+	$(get-epil)
+%_3_l.epi: %.ps
+	$(get-epil)
+%_4_l.epi: %.ps
+	$(get-epil)
+%_5_l.epi: %.ps
+	$(get-epil)
+%_6_l.epi: %.ps
+	$(get-epil)
+%_7_l.epi: %.ps
+	$(get-epil)
+%_8_l.epi: %.ps
+	$(get-epil)
+%_9_l.epi: %.ps
+	$(get-epil)
+%_10_l.epi: %.ps
+	$(get-epil)
+%_11_l.epi: %.ps
+	$(get-epil)
+%_12_l.epi: %.ps
+	$(get-epil)
+
+%.png: %_l.epi
+	@echo " *** Generating PNG image for $<"
+	gs -q -dTextAlphaBits=4 -dGraphicsAlphaBits=4 -r90 -dBATCH -dNOPAUSE \
+		-dSAFER -sOutputFile=$@ -sDEVICE=png16m $< -c showpage
+
+%.ps: %.pdf
+	pdftops $<
+
+######################################################################
+# HTML generation
+######################################################################
+
+define copy-figures-worker
+mkdir html/figures
+for DIRECTORY in $(FIGUREDIRS); do mkdir html/$$DIRECTORY; cp -v $$DIRECTORY/*.png html/$$DIRECTORY/; done
+endef
+
+define copy-figures
+$(if $(FIGUREDIRS),$(copy-figures-worker))
+endef
+
+html/index.html: $(MASTER) $(BASICDEPS) $(INDEXSGMLFILE) $(PNGFILES)
+	@echo " *** Generating HTML output"
+	-rm -r html
+	mkdir html
+	#docbook2html --output html $(MASTER)
+	docbook-2-html $(HTMLARGS) $(MASTER)
+	mv $(MASTERBASE)-html/* html/
+	rmdir $(MASTERBASE)-html
+	$(copy-figures)
+#	tidy -m html/*.html
+	ln -s $(TOPNODE).html html/index.html
+	-cp -v /usr/share/gtk-doc/data/*.png html/
+
+######################################################################
+# Cleaning
+######################################################################
+
+clean:
+	-rm -f `find . -name "*~"` `find . -name "*.png"` `find . -name "*.epi"`
+	-rm -r html-temp html index $(MASTERBASE).txt
+	-rm *.aux *.log *.dvi *.tex *.jtex *.ps *.html *.log *.out jadetex.cfg
+	-rm *.ps html/*.html figures/topology/*.epi figures/topology/*.png
+	-rm *.log *.pdb
+	-rm `find . -name ".ps"` `find . -name "*.epi"` *.pdf
+	-rm `find . -name "*.png"`
+
+######################################################################
+# Utility functions
+######################################################################
+
+GETPAGE=$(shell echo $(1) | sed -e "s/^.*_\([0-9]*\).epi/\\1/g")
+define get-epi
+@echo " *** Generating EPI image for $<"
+psselect -q $(call GETPAGE,$@) $< temp.ps
+psresize -w 6.375in -h 8.25in temp.ps temp2.ps
+../sgml-common/ps2epsi temp2.ps $@
+rm temp.ps temp2.ps
+endef
+
+GETPAGEL=$(shell echo $(1) | sed -e "s/^.*_\([0-9]*\)_l.epi/\\1/g")
+define get-epil
+@echo " *** Generating large EPI image for $<"
+psselect -q $(call GETPAGEL,$@) $< temp.ps
+psresize -w 8.5in -h 11in temp.ps temp2.ps
+../sgml-common/ps2epsi temp2.ps $@
+rm temp.ps temp2.ps
+endef
+
+
+pdf: $(MASTERBASE).pdf
+
+$(MASTERBASE).pdf: $(MASTERBASE).ps
+	ps2pdf14 $(MASTERBASE).ps
+
+plucker: $(MASTERBASE).pdb
+$(MASTERBASE).pdb: html
+	plucker-build --bpp=4 --compression=zlib --doc-name="$(MASTERBASE)" \
+	-H file:`pwd`/html/index.html -M 5 \
+	--maxheight=320 --maxwidth=310 \
+	--staybelow=file:`pwd`/html --title="$(MASTERBASE)" -p . \
+	-f $(MASTERBASE)
+
+###########################################################################
+# These are obsolete but should still work.
+###########################################################################
+
+
+$(MASTERBASE).dvi: $(MASTERBASE).tex
+	@echo " *** Generating DVI file."
+	jadetex unix-guide.tex
+	jadetex unix-guide.tex
+	jadetex unix-guide.tex
+
+$(MASTERBASE).tex: $(MASTER) $(BASICDEPS) $(INDEXSGMLFILE)
+	@echo " *** Generating TeX files."
+	docbook2tex -V paper-size=Letter $(MASTER)
+#	jade -t tex -V tex-backend -d \
+#		/usr/share/sgml/docbook/stylesheet/dsssl/modular/print/docbook.dsl \
+#		$(MASTER)
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/thirdparty/vcs-load-dirs/docs/sgml-common/SConstruct	Mon May 18 14:22:45 2009 +0200
@@ -0,0 +1,208 @@
+# vim: set filetype=python :
+# arch-tag: general-purpose SCons build file for sgml-common
+
+from glob import glob
+import os, re
+
+############################################################
+# Setup
+############################################################
+
+SConsignFile('.sconsign-master')
+#Import('env')
+d = env.Dictionary()
+if not 'JADE' in d:
+    d['JADE'] = 'jade'
+if not 'INDEXNODE' in d:
+    d['INDEXNODE'] = 'ch.index'
+if not 'GTKIMGPATH' in d:
+    d['GTKIMGPATH'] = '/usr/share/gtk-doc/data'
+if not 'PS2EPSI' in d:
+    d['PS2EPSI'] = '../sgml-common/ps2epsi'
+
+def removeindex(l):
+    while 'index/index.sgml' in l:
+        l.remove('index/index.sgml')
+
+master = d['MASTERBASE'] 
+mastersgml = master + '.sgml'
+sources = [mastersgml] + glob('*/*.sgml') + glob('*/*/*.sgml')
+removeindex(sources)
+db2htmlcmd = 'docbook-2-html -D $JADE ${HTMLARGS}  ${SOURCE}'
+db2htmlindexcmd = 'docbook-2-html -D $JADE -O -V -O html-index ${HTMLARGS} ${SOURCE}'
+
+##################################################
+# SCANNERS
+##################################################
+def recursescan(scanner, node, env):
+    result = scanner(node, env) 
+    retval = []
+    for item in result:
+        retval.append(item)
+        retval.extend(recursescan(scanner, item, env))
+    return retval
+
+SGML_includes_re = re.compile(r'<!ENTITY[^>]+SYSTEM[^>]+"(.+)"', re.M)
+def SGML_includes_scan(node, env, path):
+    ret = SGML_includes_re.findall(node.get_contents())
+    removeindex(ret)
+    return ret
+
+SGML_includes_scanner = Scanner(name = 'SGML_includes',
+    function = SGML_includes_scan, recursive = 1, skeys = ['.sgml', '.ent'])
+
+SGML_image_pdf_re = re.compile(r'<(graphic|imagedata).+?fileref="([^"]+\.pdf)"', re.S)
+SGML_image_png_re = re.compile(r'<(graphic|imagedata).+?fileref="([^"]+\.png)"', re.S)
+def SGML_image_scanner(node, env, path, arg):
+    root, ext = os.path.splitext(str(node))
+    contents = node.get_contents()
+    return SGML_includes_scan(node, env, path) + \
+            [os.getcwd() + '/' + x[1] for x in arg.findall(contents)]
+
+SGML_pdf_scanner = Scanner(name = 'SGML_pdf',
+         function = SGML_image_scanner, argument = SGML_image_pdf_re,
+         recursive = 1)
+SGML_png_scanner = Scanner(name = 'SGML_png',
+        function = SGML_image_scanner, argument = SGML_image_png_re,
+        recursive = 1)
+
+##################################################
+# BUILDERS
+##################################################
+
+#### PLAIN TEXT
+Btxt = Builder(action="docbook2txt $SOURCE", src_suffix='.sgml', suffix='.txt')
+
+#### PDF / POSTSCRIPT
+Bpdf = Builder(action="docbook-2-pdf -D ${JADE} -q -O -V -O paper-size=Letter ${PDFARGS} ${SOURCE}",
+        src_suffix='.sgml', suffix='.pdf')
+Bpdf2ps = Builder(action="pdftops ${SOURCE}", src_suffix='.pdf', suffix='.ps')
+
+#### MAN PAGES
+# FIXME: test this
+Bman = Builder(action="docbook2man $SOURCE", src_suffix='.sgml', suffix='.1')
+
+#### HTML
+Bhtml = Builder(action = [ \
+        'if test -d ${TARGET.dir} ; then rm -r ${TARGET.dir} ; fi',
+        'mkdir ${TARGET.dir}',
+        db2htmlcmd,
+        'mv ${MASTERBASE}-html/* ${TARGET.dir}/',
+        'rmdir ${MASTERBASE}-html',
+        'ln -s ${TOPNODE}.html ${TARGET.dir}/index.html',
+        'cp ${GTKIMGPATH}/*.png ${TARGET.dir}/'])
+
+#### PNG
+Bepip2png = Builder(action = 'gs -q -dTextAlphaBits=4 -dGraphicsAlphaBits=4 ' +\
+        '-r90 -dBATCH -dNOPAUSE -dSAFER -sOutputFile=$TARGET ' + \
+        '-sDEVICE=png16m $SOURCE -c showpage', suffix='.png', src_suffix='.pngepi')
+
+#### EPI from PS
+def getpagenumfromname(target, source, env, for_signature):
+    return re.search('^.*_(\d+)\.(png){0,1}epi$', str(target[0])).group(1)
+d['GETPAGE'] = getpagenumfromname
+
+Aps2epi = Action(['psselect -q ${GETPAGE} $SOURCE temp.ps',
+        'psresize -w ${WIDTH} -h ${HEIGHT} temp.ps temp2.ps',
+        '$PS2EPSI temp2.ps $TARGET',
+        'rm temp.ps temp2.ps'])
+Bps2epi = Builder(action=Aps2epi, src_suffix='.ps', suffix='.epi')
+Bps2epip = Builder(action=Aps2epi, src_suffix='.ps', suffix='.pngepi')
+Bepi2pdf = Builder(action="epstopdf -o=${TARGET} ${SOURCE}", suffix='.pdf',
+        src_suffix='.epi')
+
+#### PLUCKER
+Bplucker = Builder(action = 'plucker-build --bpp=4 --compression=zlib ' + \
+        '--doc-name="${MASTERBASE}" -H file:${SOURCE.abspath} -M 5 ' + \
+        '--maxheight=320 --maxwidth=310 --staybelow=file:`pwd`/${SOURCE.dir} ' + \
+        '--title="${MASTERBASE}" -p . -f ${MASTERBASE}')
+
+##################################################
+# General setup
+##################################################
+
+env.Append(BUILDERS = {'Text': Btxt, 'PDF2PS': Bpdf2ps, 'PDF': Bpdf, 'HTML': Bhtml,
+        'Plucker': Bplucker, 'PS2EPI': Bps2epi, 'PS2EPIP': Bps2epip,
+        'EPI2PDF': Bepi2pdf, 'EPIP2PNG': Bepip2png, 'MAN': Bman})
+
+#### INDEX GENERATION
+if 'DOINDEX' in d:
+    Bindex = Builder(action = ['if test -d ${TARGET.dir}  ; then rm -r ${TARGET.dir} ; fi',
+        "mkdir ${TARGET.dir}",
+        "collateindex.pl -i $INDEXNODE -N -o $TARGET",
+        db2htmlindexcmd,
+        "mv ${MASTERBASE}-html/HTML.index ${TARGET.dir}/",
+        "rm -r ${MASTERBASE}-html",
+        "collateindex.pl -i $INDEXNODE -g -o $TARGET ${TARGET.dir}/HTML.index"])
+    env['BUILDERS']['Index'] = Bindex
+    index = env.Index('index/index.sgml', mastersgml)
+    env.Depends(index, sources)
+    env.Clean(index, 'index')
+    deps = sources + [index]
+else:
+    deps = sources
+
+##################################################
+# BUILD RULES
+###################################################
+# Text
+text = env.Text(mastersgml)
+env.Depends(text, deps)
+env.Alias('text', text)
+
+# PDF
+pdfsgml = File(mastersgml)
+pdf = env.PDF(pdfsgml)
+figsindoc = [x for x in recursescan(SGML_pdf_scanner, pdfsgml, env) if str(x).endswith('.pdf')]
+epipdf = []
+for file in figsindoc:
+    pdfname = re.sub('_\d+\.pdf$', '.pdf', str(file))
+    if pdfname == str(file):
+        # This is not a filename that fits our pattern; add unmodified.
+        epipdf.append(file)
+        continue
+    psfile = env.PDF2PS(source = pdfname)
+    epifile = env.PS2EPI(str(file).replace(".pdf", ".epi"), psfile,
+            WIDTH='6.375in', HEIGHT='8.25in')
+    epipdf.append(env.EPI2PDF(source = epifile))
+
+env.Depends(pdf, deps)
+env.Depends(pdf, epipdf)
+env.Alias('pdf', pdf)
+env.Clean(pdf, ['jadetex.cfg', '${MASTERBASE}.aux', '${MASTERBASE}.dvi',
+        '${MASTERBASE}.jtex', '${MASTERBASE}.log', '${MASTERBASE}.out',
+        'jade-out.fot'])
+
+# PS
+ps = env.PDF2PS(source = pdf)
+env.Alias('ps', ps)
+
+# HTML
+htmlsgml = File(mastersgml)
+buildhtml = env.HTML('html/index.html', htmlsgml)
+figsindoc = [x for x in recursescan(SGML_png_scanner, htmlsgml, env) if str(x).endswith('.png')]
+epipng = []
+for file in figsindoc:
+    pdfname = re.sub('_\d+\.png$', '.pdf', str(file))
+    if pdfname == str(file):
+        # This is not a filename that fits our pattern; add unmodified. 
+        epipng.append(file)
+        continue
+    psfile = env.PDF2PS(source = pdfname)
+    epifile = env.PS2EPIP(str(file).replace(".png", ".pngepi"), psfile,
+            WIDTH='8.5in', HEIGHT='11in')
+    epipng.append(env.EPIP2PNG(source = epifile))
+
+env.Depends(buildhtml, epipng)
+env.Depends(buildhtml, deps)
+pnginstalls = env.InstallAs(['html/' + str(x) for x in epipng], epipng)
+env.Depends(pnginstalls, buildhtml)
+html = env.Alias('html', buildhtml)
+html = env.Alias('html', pnginstalls)
+env.Clean(buildhtml, 'html')
+
+# Plucker
+plucker = env.Plucker(master + '.pdb', 'html/index.html')
+env.Alias('plucker', plucker)
+
+env.Default(html)
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/thirdparty/vcs-load-dirs/docs/sgml-common/ps2epsi	Mon May 18 14:22:45 2009 +0200
@@ -0,0 +1,77 @@
+#!/bin/sh
+# $RCSfile: ps2epsi,v $ $Revision: 1.4.2.2 $
+# arch-tag: ps2epsi customized for sgml-common
+
+tmpfile=/tmp/ps2epsi$$
+
+export outfile
+
+if [ $# -lt 1 -o $# -gt 2 ]; then
+	echo "Usage: `basename $0` file.ps [file.epsi]" 1>&2
+	exit 1
+fi
+
+infile=$1;
+
+if [ $# -eq 1 ]
+then
+	case "${infile}" in
+	  *.ps)		base=`basename ${infile} .ps` ;;
+	  *.cps)	base=`basename ${infile} .cps` ;;
+	  *.eps)	base=`basename ${infile} .eps` ;;
+	  *.epsf)	base=`basename ${infile} .epsf` ;;
+	  *)		base=`basename ${infile}` ;;
+	esac
+	outfile=${base}.epsi
+else
+	outfile=$2
+fi
+
+ls -l ${infile} |
+awk 'F==1	{
+		cd="%%CreationDate: " $6 " " $7 " " $8;
+		t="%%Title: " $9;
+		f="%%For:" U " " $3;
+		c="%%Creator: Ghostscript ps2epsi from " $9;
+		next;
+		}
+	/^%!/	{next;}
+	/^%%Title:/	{t=$0; next;}
+	/^%%Creator:/	{c=$0; next;}
+	/^%%CreationDate:/	{cd=$0; next;}
+	/^%%For:/	{f=$0; next;}
+	!/^%/	{
+		print "/ps2edict 30 dict def";
+		print "ps2edict begin";
+		print "/epsititle (" t "\\n) def";
+		print "/epsicreator (" c "\\n) def";
+		print "/epsicrdt (" cd "\\n) def";
+		print "/epsifor (" f "\\n) def";
+		print "end";
+		exit(0);
+		}
+	' U="$USERNAME$LOGNAME"  F=1 - F=2 ${infile} >$tmpfile
+
+gs -q -dNOPAUSE -dSAFER -dDELAYSAFER -r72 -sDEVICE=bit -sOutputFile=/dev/null $tmpfile ps2epsi.ps $tmpfile <${infile} 1>&2
+rm -f $tmpfile
+
+(
+cat << BEGINEPS
+save countdictstack mark newpath /showpage {} def /setpagedevice {pop} def
+%%EndProlog
+%%Page 1 1
+BEGINEPS
+
+cat ${infile} |
+sed -e '/^%%BeginPreview:/,/^%%EndPreview[^!-~]*$/d' -e '/^%!PS-Adobe/d'\
+    -e '/^%%[A-Za-z][A-Za-z]*[^!-~]*$/d' -e '/^%%[A-Za-z][A-Za-z]*: /d'
+
+cat << ENDEPS
+%%Trailer
+cleartomark countdictstack exch sub { end } repeat restore
+%%EOF
+ENDEPS
+
+) >> ${outfile}
+
+exit 0
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/thirdparty/vcs-load-dirs/docs/vcs_load_dirs.sgml	Mon May 18 14:22:45 2009 +0200
@@ -0,0 +1,314 @@
+<!DOCTYPE reference PUBLIC "-//OASIS//DTD DocBook V4.1//EN" [
+  <!ENTITY vld "<application>vcs_load_dirs</application>">
+]>
+<!--      "file:///usr/share/sgml/docbook/dtd/xml/4.2/docbookx.dtd"> -->
+
+<reference>
+  <title>vcs_load_dirs Manual</title>
+
+  <refentry>
+    <refentryinfo>
+      <address><email>jgoerzen@complete.org</email></address>
+      <author><firstname>John</firstname><surname>Goerzen</surname></author>
+    </refentryinfo>
+
+    <refmeta>
+      <refentrytitle>vcs_load_dirs</refentrytitle>
+      <manvolnum>1</manvolnum>
+      <refmiscinfo>John Goerzen</refmiscinfo>
+    </refmeta>
+
+    <refnamediv>
+      <refname>vcs_load_dirs</refname>
+      <refpurpose>Import upstream archives into tla, hg, darcs, or git and handle renames</refpurpose>
+    </refnamediv>
+
+    <refsynopsisdiv>
+      <cmdsynopsis>
+        <command>vcs_load_dirs</command>
+        <group>
+          <arg>-w<replaceable>WC</replaceable></arg>
+          <arg>--wc=<replaceable>WC</replaceable></arg>
+        </group>
+        <group>
+          <arg>-l<replaceable>FILE</replaceable></arg>
+          <arg>--log=<replaceable>FILE</replaceable></arg>
+        </group>
+        <group>
+          <arg>-L<replaceable>TEXT</replaceable></arg>
+          <arg>--log-message=<replaceable>TEXT</replaceable></arg>
+        </group>
+        <group>
+          <arg>-s<replaceable>MSG</replaceable></arg>
+          <arg>--summary=<replaceable>MSG</replaceable></arg>
+        </group>
+        <group>
+          <arg>-v</arg>
+          <arg>--verbose</arg>
+        </group>
+        <group>
+          <arg>-n</arg>
+          <arg>--no-commit</arg>
+        </group>
+        <group>
+          <arg>-f</arg>
+          <arg>--fs-changes-only</arg>
+        </group>
+        <arg choice="plain">vendor_source_dir</arg>
+      </cmdsynopsis>
+
+      <cmdsynopsis>
+	<command>vcs_load_dirs</command>
+        <arg choice="plain">--version</arg>
+      </cmdsynopsis>
+
+      <cmdsynopsis>
+        <command>vcs_load_dirs</command>
+        <group>
+          <arg choice="plain">--help</arg>
+          <arg choice="plain">-h</arg>
+        </group>
+      </cmdsynopsis>
+
+    </refsynopsisdiv>
+
+    <refsect1>
+      <title>Description</title>
+      <para>
+        &vld; is a tool to help you import archives from people that
+        do not use Arch, Mercurial, Darcs, or Git into an Arch,
+        Mercurial, Darcs, or Git
+        archive.  Along the way, it takes
+        care of all the typical hassles: it will create tags in the
+        repository for new files; delete tags for removed files;
+        and move tags for files that have themselves been moved.
+        &vld; can handle both first imports of a package and
+        successive imports.
+      </para>
+      <para>
+        For those familiar with Subversion, you will know that it has
+        a tool called <application>svn_load_dirs</application>.  &vld;
+        is modeled after that program, but contains various
+        improvements for ease of use.
+      </para>
+    </refsect1>
+
+    <refsect1>
+      <title>Usage</title>
+      <para>
+        In the simplest case, you will simply run &vld; from your
+        working directory, passing it the name of the directory
+        containing the new upstream sources.  &vld; will run, usually
+        making and committing changes without any need of greater
+        assistance from you.
+      </para>
+      <refsect2>
+        <title>Renamed Files</title>
+        <para>
+          &vld; can automatically detect added files and deleted files
+          if they occur in isolation.  However, it cannot
+          automatically tell the difference between a delete and an
+          add, and a rename.  (There is not enough information in
+          simple upstream imports to capture the intent.)  
+          Therefore, &vld; will prompt you to solve the problem.
+          You can opt to ignore it -- &vld; will just issue deletes
+          and adds as appropriate -- but then your history will be
+          harder to track if there were indeed renames along the way.
+        </para>
+        <para>
+          You will be presented with a four-column listing.  Those
+          columns, in order from left to right, represent:
+        </para>
+        <orderedlist>
+          <listitem><para>File number for deleted file</para>
+          </listitem>
+          <listitem><para>File name of deleted file</para>
+          </listitem>
+          <listitem><para>File number of added file</para>
+          </listitem>
+          <listitem><para>File name of added file</para>
+          </listitem>
+        </orderedlist>
+        <para>
+          You have three options at this point: you can press q to
+          indicate that there are no more renames; you can press r to
+          request the list be re-drawn, or you can supply one or more
+          renames.
+        </para>
+        <para>
+          A rename uses the left column as the old name and the right
+          column as the new name.  To save you typing, you just enter
+          the appropriate file numbers separated by spaces.  So, if
+          the file at position 5 on the left moved to position 1b on
+          the right, just enter "5 1b".  After doing that, the list
+          will be automatically re-drawn, and you can either make
+          further selections or exit.
+        </para>
+        <para>
+          If you want to rename several files at once, enter your
+          rename commands separated by commas.  For instance, "5 1b, 6
+          1c".
+        </para>
+      </refsect2>
+    </refsect1>
+
+    <refsect1>
+      <title>Options</title>
+      
+      <variablelist>
+        <varlistentry><term>-h</term>
+          <term>--help</term>
+          <listitem><para>Displays a brief summary of the available
+              options and exits.</para>
+          </listitem>
+        </varlistentry>
+
+        <varlistentry><term>-l<replaceable>FILE</replaceable></term>
+          <term>--log=<replaceable>FILE</replaceable></term>
+          <listitem><para>
+              Add the contents of <replaceable>FILE</replaceable> to
+              the end of the log message automatically generated by
+              &vld; for a commit/record.
+            </para>
+          </listitem>
+        </varlistentry>
+
+        <varlistentry><term>-L<replaceable>TEXT</replaceable></term>
+          <term>--log-message=<replaceable>TEXT</replaceable></term>
+          <listitem><para>
+              Add the <replaceable>TEXT</replaceable> specified on the
+              command line to the log message.
+            </para>
+          </listitem>
+        </varlistentry>
+
+        <varlistentry><term>-s<replaceable>MSG</replaceable></term>
+          <term>--summary=<replaceable>MSG</replaceable></term>
+          <listitem>
+            <para>
+              Sets the log summary line to the
+              <replaceable>MSG</replaceable> supplied on the command
+              line.  This operation requires Darcs, Mercurial, or 
+              tla 1.1 or above; tla 1.0
+              does not provide the features necessary to implement it.
+            </para>
+          </listitem>
+        </varlistentry>
+
+        <varlistentry><term>-n</term>
+          <term>--no-commit</term>
+          <listitem><para>
+              Do not commit changes to the target VCS.
+            </para>
+          </listitem>
+        </varlistentry>
+
+        <varlistentry><term>-f</term>
+          <term>--fs-changes-only</term>
+          <listitem><para>
+              Move files in the filesystem, but do not inform VCS of
+              changes.  You will almost certainly want to use this
+              with -n.  This also suppresses the interactive interface.
+            </para>
+          </listitem>
+        </varlistentry>
+              
+        <varlistentry><term>-v</term>
+          <term>--verbose</term>
+          <listitem><para>Causes &vld; to display additional status
+              information.  You will see the darcs/tla/git/hg commands that are
+              being run behind the scenes, and some executed commands
+              may receive arguments asking them to be more verbose as
+              well.
+            </para>
+          </listitem>
+        </varlistentry>
+
+        <varlistentry><term>--version</term>
+          <listitem><para>Causes &vld; to print its version number and
+              exit.
+            </para>
+          </listitem>
+        </varlistentry>
+
+        <varlistentry><term>-w<replaceable>WC</replaceable></term>
+          <term>--wc=<replaceable>WC</replaceable></term>
+          <listitem>
+            <para>
+              Explicitly set the working copy directory to
+              <replaceable>WC</replaceable>.  If not specified,
+              defaults to the current working directory.
+            </para>
+          </listitem>
+        </varlistentry>        
+      </variablelist>
+    </refsect1>
+    <refsect1>
+      <title>Copyright</title>
+      <para>&vld;, and this manual, are Copyright &copy; 2003-2007 John Goerzen.</para>
+
+      <para>
+	This program is free software; you can redistribute it and/or modify
+	it under the terms of the GNU General Public License as published by
+	the Free Software Foundation; either version 2 of the License, or
+	(at your option) any later version.
+      </para>
+
+      <para>
+	This program is distributed in the hope that it will be useful,
+	but WITHOUT ANY WARRANTY; without even the implied warranty of
+	MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+	GNU General Public License for more details.
+      </para>
+
+      <para>
+	You should have received a copy of the GNU General Public License
+	along with this program; if not, write to the Free Software
+	Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA</para>
+
+    </refsect1>
+
+    <refsect1>
+      <title>Author</title>
+      <para>&vld;, its libraries, documentation, and all included files, except where
+	noted, was written by John Goerzen <email>jgoerzen@complete.org</email> and
+	copyright is held as stated in the COPYRIGHT section.
+      </para>
+
+      <para>
+        The homepage for &vld; is
+        <ulink
+        url="http://software.complete.org/vcs-load-dirs"></ulink>
+      </para>
+      <para>
+        This package may be downloaded from
+        <ulink
+          url="http://packages.debian.org/vcs-load-dirs"></ulink>.
+        darcs users may also obtain the latest version with:
+        <screen>
+$ darcs get --partial http://darcs.complete.org/vcs-load-dirs
+        </screen>
+      </para>
+    </refsect1>
+
+    <refsect1>
+      <title>See Also</title>
+      <para>
+        <application>tla</application>(1),
+        <application>darcs</application>(1),
+        <application>hg</application>(1),
+        <application>git</application>(1),
+        <application>tla-buildpackage</application>(1),
+        <application>darcs-buildpackage</application>(1),
+        <application>hg-buildpackage</application>(1).
+      </para>
+    </refsect1>
+  </refentry>
+</reference>
+
+<!--
+Local Variables:
+mode: sgml
+sgml-set-face: T
+End:
+-->
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/thirdparty/vcs-load-dirs/git_load_dirs	Mon May 18 14:22:45 2009 +0200
@@ -0,0 +1,25 @@
+#!/usr/bin/python
+
+### REMEMBER TO UPDATE VERSION IN vcs_support/init.py
+
+# Copyright (C) 2006-2007 John Goerzen
+# <jgoerzen@complete.org>
+#
+#    This program is free software; you can redistribute it and/or modify
+#    it under the terms of the GNU General Public License as published by
+#    the Free Software Foundation; either version 2 of the License, or
+#    (at your option) any later version.
+#
+#    This program is distributed in the hope that it will be useful,
+#    but WITHOUT ANY WARRANTY; without even the implied warranty of
+#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+#    GNU General Public License for more details.
+#
+#    You should have received a copy of the GNU General Public License
+#    along with this program; if not, write to the Free Software
+#    Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
+
+
+from vcs_support import init
+
+init.run("git")
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/thirdparty/vcs-load-dirs/hg_load_dirs	Mon May 18 14:22:45 2009 +0200
@@ -0,0 +1,25 @@
+#!/usr/bin/python
+
+### REMEMBER TO UPDATE VERSION IN vcs_support/init.py
+
+# Copyright (C) 2003-2007 John Goerzen
+# <jgoerzen@complete.org>
+#
+#    This program is free software; you can redistribute it and/or modify
+#    it under the terms of the GNU General Public License as published by
+#    the Free Software Foundation; either version 2 of the License, or
+#    (at your option) any later version.
+#
+#    This program is distributed in the hope that it will be useful,
+#    but WITHOUT ANY WARRANTY; without even the implied warranty of
+#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+#    GNU General Public License for more details.
+#
+#    You should have received a copy of the GNU General Public License
+#    along with this program; if not, write to the Free Software
+#    Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
+
+
+from vcs_support import init
+
+init.run("hg")
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/thirdparty/vcs-load-dirs/setup.py	Mon May 18 14:22:45 2009 +0200
@@ -0,0 +1,29 @@
+#!/usr/bin/env python
+# Copyright (C) 2003-2007 John Goerzen
+#
+#    This program is free software; you can redistribute it and/or modify
+#    it under the terms of the GNU General Public License as published by
+#    the Free Software Foundation; either version 2 of the License, or
+#    (at your option) any later version.
+#
+#    This program is distributed in the hope that it will be useful,
+#    but WITHOUT ANY WARRANTY; without even the implied warranty of
+#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+#    GNU General Public License for more details.
+#
+#    You should have received a copy of the GNU General Public License
+#    along with this program; if not, write to the Free Software
+#    Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
+# END OF COPYRIGHT #
+
+from distutils.core import setup
+
+setup(name = "vcs_load_dirs",
+      author = 'John Goerzen',
+      author_email = 'jgoerzen@complete.org',
+      packages = ['vcs_support'],
+      scripts = ['tla_load_dirs', 'hg_load_dirs', 'darcs_load_dirs', 'baz_load_dirs', 'svk_load_dirs', 'git_load_dirs']
+      #license = offlineimap.version.copyright + \
+      #          ", Licensed under the GPL version 2"
+)
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/thirdparty/vcs-load-dirs/svk_load_dirs	Mon May 18 14:22:45 2009 +0200
@@ -0,0 +1,20 @@
+#!/usr/bin/python
+# Copyright (C) 2003-2007 John Goerzen
+# <jgoerzen@complete.org>
+#
+#    This program is free software; you can redistribute it and/or modify
+#    it under the terms of the GNU General Public License as published by
+#    the Free Software Foundation; either version 2 of the License, or
+#    (at your option) any later version.
+#
+#    This program is distributed in the hope that it will be useful,
+#    but WITHOUT ANY WARRANTY; without even the implied warranty of
+#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+#    GNU General Public License for more details.
+#
+#    You should have received a copy of the GNU General Public License
+#    along with this program; if not, write to the Free Software
+#    Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
+
+from vcs_support import init
+init.run("svk")
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/thirdparty/vcs-load-dirs/tla_load_dirs	Mon May 18 14:22:45 2009 +0200
@@ -0,0 +1,20 @@
+#!/usr/bin/python
+# Copyright (C) 2003-2007 John Goerzen
+# <jgoerzen@complete.org>
+#
+#    This program is free software; you can redistribute it and/or modify
+#    it under the terms of the GNU General Public License as published by
+#    the Free Software Foundation; either version 2 of the License, or
+#    (at your option) any later version.
+#
+#    This program is distributed in the hope that it will be useful,
+#    but WITHOUT ANY WARRANTY; without even the implied warranty of
+#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+#    GNU General Public License for more details.
+#
+#    You should have received a copy of the GNU General Public License
+#    along with this program; if not, write to the Free Software
+#    Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
+
+from vcs_support import init
+init.run("tla")
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/thirdparty/vcs-load-dirs/vcs_support/commandver.py	Mon May 18 14:22:45 2009 +0200
@@ -0,0 +1,179 @@
+# Copyright (C) 2003-2007 John Goerzen
+# <jgoerzen@complete.org>
+#
+#    This program is free software; you can redistribute it and/or modify
+#    it under the terms of the GNU General Public License as published by
+#    the Free Software Foundation; either version 2 of the License, or
+#    (at your option) any later version.
+#
+#    This program is distributed in the hope that it will be useful,
+#    but WITHOUT ANY WARRANTY; without even the implied warranty of
+#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+#    GNU General Public License for more details.
+#
+#    You should have received a copy of the GNU General Public License
+#    along with this program; if not, write to the Free Software
+#    Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
+
+import util
+vcssyn = None
+vcsobj = None
+vcscmd = None
+darcs = False
+svk = False
+git = False
+hg = False
+
+def setscm(x):
+    global darcs, svk, git, vcscmd, hg
+    if (x == "darcs"):
+        vcscmd = "darcs"
+        darcs = True
+    elif (x == "baz"):
+        vcscmd = "baz"
+    elif (x == "tla"):
+        vcscmd = "tla"
+    elif (x == "git"):
+        vcscmd = "git"
+        git = True
+    elif (x == "hg"):
+        vcscmd = "hg"
+        hg = True
+    elif (x == "svk"):
+        vcscmd = "svk"
+        svk = True
+    else:
+        print "Failed to determine VCS to use"
+        sys.exit(2)
+    print " VCSCMD: ", vcscmd
+
+def isdarcs():
+    global darcs
+    return darcs
+
+def issvk():
+    global svk
+    return svk
+
+def isgit():
+    global git
+    return git
+
+def ishg():
+    global hg
+    return hg
+
+def getvcssyntax():
+    global vcssyn, vcsobj
+    if vcssyn != None:
+        return vcssyn
+
+    if isdarcs():
+        vcssyn = 'darcs'
+        vcsobj = Darcs()
+    elif ishg():
+        vcssyn = 'hg'
+        vcsobj = Hg()
+    elif isgit():
+        vcssyn = 'Git'
+        vcsobj = Git()
+    elif util.getstdoutsafeexec(vcscmd, ['-V'])[0].find('tla-1.0.') != -1:
+        vcssyn = '1.0'
+        vcsobj = Tla10()
+    elif util.getstdoutsafeexec(vcscmd, ['-V'])[0].find('tla-1.1.') != -1:
+        vcssyn = '1.1'
+        vcsobj = Tla11()
+    elif util.getstdoutsafeexec(vcscmd, ['-V'])[0].find('tla-1.3.') != -1:
+        vcssyn = '1.3'
+        vcsobj = Tla13()
+    elif util.getstdoutsafeexec(vcscmd, ['-V'])[0].find('baz Bazaar version 1.4.') != -1:
+        vcssyn = 'baz1.4'
+        vcsobj = Baz14()        
+    elif util.getstdoutsafeexec(vcscmd, ['-V'])[0].find('This is svk') != -1:
+        vcssyn = 'svk'
+        vcsobj = Svk()
+    else:
+        vcssyn = '1.3'
+        vcsobj = Tla13()
+    return vcssyn
+
+class Tla10:
+    tagging_method = 'tagging-method'
+    add = ['add-tag']
+    move = 'move-tag'
+    delete = ['delete-tag']
+    update = 'update --in-place .'
+    replay = 'replay --in-place .'
+    commit = 'commit'
+    importcmd = 'import'
+
+class Tla11:
+    tagging_method = 'id-tagging-method'
+    add = ['add']
+    move = 'move'
+    delete = ['delete']
+    update = 'update'
+    replay = 'replay'
+    commit = 'commit'
+    importcmd = 'import'
+
+class Tla13:
+    tagging_method = 'id-tagging-method'
+    add = ['add-id']
+    move = 'move-id'
+    delete = ['delete-id']
+    update = 'update'
+    replay = 'replay'
+    commit = 'commit'
+    importcmd = 'import'
+
+class Baz14:
+    tagging_method = 'id-tagging-method'
+    add = ['add-id']
+    move = 'move-id'
+    delete = ['delete-id']
+    update = 'update'
+    replay = 'replay'
+    commit = 'commit'    
+    importcmd = 'import'
+
+class Darcs:
+    tagging_method = None
+    add = ['add', '--case-ok']
+    move = 'mv'
+    delete = None
+    update = 'pull'
+    replay = 'pull'
+    commit = 'record'
+
+class Hg:
+    tagging_method = None
+    add = ['add']
+    move = 'mv'
+    delete = None
+    update = 'pull'
+    replay = 'pull'
+    commit = 'commit'
+
+class Git:
+    tagging_method = None
+    add = ['add']
+    move = 'mv'
+    delete = ['rm', '-r']
+    update = 'checkout'
+    replay = None 
+    commit = 'commit'
+
+class Svk:
+	tagging_method = None
+	add = ['add']
+	move = 'mv'
+	delete = ['rm']
+	update = 'pull'
+	replay = 'pull'
+	commit = 'commit'
+
+def cmd():
+    global vcsobj
+    getvcssyntax()
+    return vcsobj
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/thirdparty/vcs-load-dirs/vcs_support/init.py	Mon May 18 14:22:45 2009 +0200
@@ -0,0 +1,75 @@
+# Copyright (C) 2003-2007 John Goerzen
+# <jgoerzen@complete.org>
+#
+#    This program is free software; you can redistribute it and/or modify
+#    it under the terms of the GNU General Public License as published by
+#    the Free Software Foundation; either version 2 of the License, or
+#    (at your option) any later version.
+#
+#    This program is distributed in the hope that it will be useful,
+#    but WITHOUT ANY WARRANTY; without even the implied warranty of
+#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+#    GNU General Public License for more details.
+#
+#    You should have received a copy of the GNU General Public License
+#    along with this program; if not, write to the Free Software
+#    Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
+
+
+from optparse import OptionParser
+from vcs_support import util, commandver
+import sys
+
+def run(darcsdefault):
+    version = '1.1.4'
+
+    parser = OptionParser(usage="usage: %prog [options] vendor_source_dir",
+                          version=version)
+    parser.add_option("-w", "--wc", dest="wc", default=".",
+                      help="Set working copy to WC (defaults to current directory)", metavar="WC")
+    parser.add_option("-l", "--log", dest="changelog", metavar="FILE", default=None,
+                      help="Get changelog text from FILE")
+    parser.add_option("-L", "--log-message", dest="logtext", metavar="TEXT",
+                      default='', help="Log with TEXT")
+    parser.add_option("-s", "--summary", dest="summary", metavar="MSG",
+                      default=None, help="Set log summary message to MSG, overriding the default")
+    parser.add_option("-v", "--verbose", action="store_true", dest="verbose",
+                      default=False, help="Show more status information")
+    parser.add_option("-f", "--fs-changes-only", action="store_true",
+                      dest="fsonly", default=False,
+                      help="Disable interactivity and issue no add/rm/mv commands to VCS, use with -n")
+    parser.add_option("-n", "--no-commit", action="store_false", dest="docommit",
+                      default=True, help="Do not commit the changes.")
+
+    (options, args) = parser.parse_args()
+    util.verbose = options.verbose
+
+    log = options.logtext + "\n"
+    if options.changelog:
+        fd = open(options.changelog, "r")
+        log += fd.read()
+        fd.close()
+
+    if len(args) != 1:
+        parser.error("Failed to specify a path to import.")
+
+    commandver.setscm(darcsdefault)
+
+    from vcs_support import vcs_wc, vcs_interact
+
+    wc = vcs_wc.wc(options.wc, verbose = options.verbose,
+                   fsonly = options.fsonly)
+    if not wc.gettaggingmethod() in ['explicit', 'tagline']:
+        print "Working directory uses unsupported tagging method %s" % \
+              wc.gettaggingmethod()
+        sys.exit(1)
+
+
+    iobj = vcs_interact.interaction(wc, args[0], options.docommit, log = log,
+                                    verbose = options.verbose,
+                                    summary = options.summary)
+    try:
+        iobj.main()
+    finally:
+        iobj.cleanup()
+    
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/thirdparty/vcs-load-dirs/vcs_support/util.py	Mon May 18 14:22:45 2009 +0200
@@ -0,0 +1,172 @@
+# Copyright (C) 2003-2007 John Goerzen
+# <jgoerzen@complete.org>
+#
+#    This program is free software; you can redistribute it and/or modify
+#    it under the terms of the GNU General Public License as published by
+#    the Free Software Foundation; either version 2 of the License, or
+#    (at your option) any later version.
+#
+#    This program is distributed in the hope that it will be useful,
+#    but WITHOUT ANY WARRANTY; without even the implied warranty of
+#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+#    GNU General Public License for more details.
+#
+#    You should have received a copy of the GNU General Public License
+#    along with this program; if not, write to the Free Software
+#    Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
+
+import os, sys, re
+
+nulldev = None
+verbose = 0
+
+class ExecProblem(Exception):
+    pass
+
+def mainexec(program, args = [], child_stdout = None,
+             child_stdin = None, child_stderr = None, wait = 1, closefds = []):
+    """Runs the program as a sub-process, passing to it args if specified.
+    The sub-process has its file descriptors adjusted as per the arguments.
+
+    If wait is 1, wait until the child exits, then return the result code from
+    os.waitpid().
+
+    If wait is 0, return the PID immediately."""
+    global verbose
+    def setfds(source, dest):
+        if source != None:
+            if hasattr(source, 'fileno'):
+                source = source.fileno()
+            os.dup2(source, dest)
+            
+    pid = os.fork()
+    if not pid:
+        if verbose:
+            print "Running: ", program, args
+        setfds(child_stdin, 0)
+        setfds(child_stdout, 1)
+        setfds(child_stderr, 2)
+        for fd in closefds:
+            os.close(fd)
+        os.execvp(program, (program,) + tuple(args))
+        sys.exit(255)
+    else:
+        if wait:
+            return os.waitpid(pid, 0)[1]
+        else:
+            return pid
+
+def safeexec(program, args = [], child_stdout = None,
+             child_stdin = None, child_stderr = None,
+             expected = 0):
+    """Calls mainexec() with the appropriate arguments, and raises
+    an exception if the program died with a signal or returned an
+    error code other than expected.  This function will always wait."""
+    result = mainexec(program, args, child_stdout, child_stdin, child_stderr)
+    return checkresult(result, expected)
+
+def getstdoutsafeexec(program, args, expected = 0):
+    pipes = os.pipe()
+    pid = mainexec(program, args, child_stdout = pipes[1], wait  = 0)
+    os.close(pipes[1])
+    fd = os.fdopen(pipes[0], 'r')
+    retval = fd.readlines()
+    checkpid(pid, expected)
+    os.close(pipes[0])
+    return retval
+
+def silentsafeexec(program, args, expected = 0):
+    """Silently runs the specified program."""
+    null = getnull()
+    result = mainexec(program, args, null, null, null)
+    return checkresult(result, expected)
+
+def checkresult(result, expected):
+    info = []
+    if os.WIFSIGNALED(result):
+        info.append("got signal %d" % os.WTERMSIG(result))
+    if os.WIFEXITED(result):
+        info.append("exited with code %d" % os.WEXITSTATUS(result))
+    info = ",".join(info)
+    if not os.WIFEXITED(result):
+        raise ExecProblem, info
+    if os.WEXITSTATUS(result) != expected:
+        raise ExecProblem, info + " (expected exit code %d)" % expected
+    return result
+
+def checkpid(pid, expected):
+    return checkresult(os.waitpid(pid, 0)[1], expected)
+
+def getnull():
+    global nulldev
+    if not nulldev:
+        nulldev = open("/dev/null", "w+")
+    return nulldev
+
+def chdircmd(newdir, func, *args, **kwargs):
+    cwd = os.getcwd()
+    os.chdir(newdir)
+    try:
+        return apply(func, args, kwargs)
+    finally:
+        os.chdir(cwd)
+
+def maketree(path, addpath = None, ignore = [], res = None):
+    thisdir = os.listdir(path)
+    retval = []
+    others = []
+    if res == None:
+        res = [re.compile(x) for x in ignore]
+    for item in thisdir:
+        skip = 0
+        for retest in res:
+            if retest.search(item):
+                skip = 1
+                break
+        if skip:
+            continue
+        dirname = os.path.join(path, item)
+        if os.path.isdir(dirname) and not os.path.islink(dirname):
+            if addpath:
+                retval.append(os.path.join(addpath, item) + '/')
+            else:
+                retval.append(item + '/')
+            if addpath:
+                newaddpath = os.path.join(addpath, item)
+            else:
+                newaddpath = item
+            others.extend(maketree(dirname, newaddpath, res = res))
+        else:
+            if addpath:
+                retval.append(os.path.join(addpath, item))
+            else:
+                retval.append(item)
+    return sorttree(retval + others)
+
+def sorttree(srctree, filesfirst = False):
+    retval = []
+    dirs = [x for x in srctree if x.endswith('/')]
+    files = [x for x in srctree if not x.endswith('/')]
+    dirs.sort()
+    files.sort()
+    if filesfirst:
+        return files + dirs
+    else:
+        return dirs + files
+    
+        
+def copyfrom(srcdir, destdir):
+    pipes = os.pipe()
+    verbargs = []
+    #if verbose:
+    #    verbargs.append('-v')
+    readerpid = chdircmd(srcdir, mainexec, "tar", ["-cSpf", "-", "."],
+                         child_stdout = pipes[1], wait = 0,
+                         closefds = [pipes[0]])
+    writerpid = chdircmd(destdir, mainexec, "tar", ["-xSpf", "-"] + verbargs,
+                         child_stdin = pipes[0], wait = 0, closefds = [pipes[1]])
+    os.close(pipes[0])
+    os.close(pipes[1])
+    checkpid(readerpid, 0)
+    checkpid(writerpid, 0)
+    
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/thirdparty/vcs-load-dirs/vcs_support/vcs_interact.py	Mon May 18 14:22:45 2009 +0200
@@ -0,0 +1,214 @@
+# Copyright (C) 2003-2007 John Goerzen
+# <jgoerzen@complete.org>
+#
+#    This program is free software; you can redistribute it and/or modify
+#    it under the terms of the GNU General Public License as published by
+#    the Free Software Foundation; either version 2 of the License, or
+#    (at your option) any later version.
+#
+#    This program is distributed in the hope that it will be useful,
+#    but WITHOUT ANY WARRANTY; without even the implied warranty of
+#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+#    GNU General Public License for more details.
+#
+#    You should have received a copy of the GNU General Public License
+#    along with this program; if not, write to the Free Software
+#    Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
+
+import sys, os
+import util
+from commandver import isdarcs
+from tempfile import mkdtemp
+
+class interaction:
+    def __init__(self, wcobj, importdir, docommit, log = '', verbose = 0,
+                 summary = None):
+        self.wcobj = wcobj
+        self.log = log
+        self.docommit = docommit
+        self.verb = verbose
+        self.summary = summary
+
+        if os.path.isdir(importdir):
+            self.importdir = os.path.abspath(importdir)
+            self.importfile = None
+            self.tmpdir = None
+
+        else:                           # Not a dir, try to unpack an archive.
+            self.importfile = os.path.abspath(importdir)
+            # mkdtemp returns an absolute path
+            self.importdir = mkdtemp("-vcs-load-dirs", ",,unpack-", "..")
+            self.tmpdir = self.importdir
+
+            try:
+                if self.verb:
+                    print "Unpacking archive..."
+
+                if self.importfile.endswith(".tar.gz"):
+                    util.chdircmd(self.importdir, util.safeexec, "tar",
+                                  ["-zxf", self.importfile])
+
+                elif self.importfile.endswith(".tar.bz2"):
+                    util.chdircmd(self.importdir, util.safeexec, "tar",
+                                  ["-jxf", self.importfile])
+                elif self.importfile.endswith(".zip"):
+                    util.chdircmd(self.importdir, util.safeexec, "unzip",
+                                  [self.importfile])
+                else:
+                    raise IOError, "Unknown archive file type"
+
+                # Many tarballs expand into just one single directory.
+                # Check to see if that's the case.
+
+                dents = os.listdir(self.importdir)
+                if len(dents) == 1 and os.path.isdir(self.importdir + "/" +
+                                                     dents[0]):
+                    self.importdir = self.importdir + "/" + dents[0]
+            except:
+                self.cleanup()
+                raise
+
+    def cleanup(self):
+        if not (self.tmpdir is None):
+            util.safeexec("rm", ["-rf", self.tmpdir])
+            self.tmpdir = None
+
+    def updateimportfiles(self):
+        if self.verb:
+            print "Scanning upstream tree..."
+        self.importfiles = util.maketree(self.importdir)
+
+    def updatewcfiles(self):
+        if self.verb:
+            print "Scanning working copy tree..."
+        self.wcfiles = self.wcobj.gettree()
+
+    def update(self):
+        self.updatewcfiles()
+        self.updatechangedfiles()
+
+    def updatechangedfiles(self):
+        if self.verb:
+            print "Calculating changes..."
+        wcfilehash = {}
+        for x in self.wcfiles:
+            wcfilehash[x] = 1
+        importfilehash = {}
+        for x in self.importfiles:
+            importfilehash[x] = 1
+        
+        self.addedfiles = [x for x in self.importfiles if not wcfilehash.has_key(x)]
+        self.deletedfiles = [x for x in self.wcfiles if not importfilehash.has_key(x)]
+        
+
+    def main(self):
+        def readloop():
+            for command in sys.stdin.readline().strip().split(','):
+                command = command.strip()
+                if command == 'q':
+                    return 0
+                if command == 'r':
+                    return 1
+                src, dest = command.split(' ')
+                src = int(src, 16)
+                dest = int(dest, 16)
+                self.mv(self.deletedfiles[src], self.addedfiles[dest])
+            return 1
+
+        self.updateimportfiles()
+        needsupdate = 1
+        
+        while 1:
+            self.update()
+            if self.wcobj.fsonly:
+                # Don't show this interface if we're not talking to the VCS
+                break
+            if not (len(self.addedfiles) and len(self.deletedfiles)):
+                # Just ran update; don't do it again.
+                needsupdate = 0
+                break
+
+            counter = 0
+            print "%3s %-35s %3s %-35s" % ('Num', 'Source Files', 'Num',
+                                             'Destination Files',)
+            print "%s %s %s %s" % ("-" * 3, "-" * 35, "-" * 3, "-" * 35)
+            while counter < max(len(self.addedfiles), len(self.deletedfiles)):
+                addfile = ''
+                delfile = ''
+                if counter < len(self.addedfiles):
+                    addfile = self.addedfiles[counter]
+                if counter < len(self.deletedfiles):
+                    delfile = self.deletedfiles[counter]
+                print "%3x %-35s %3x %-35s" % (counter, delfile, counter, addfile)
+                counter += 1
+            print "Syntax: src dest [,src dest [,...]] to move, q to accept, r to redraw:"
+            sys.stdout.write("Command: ")
+            sys.stdout.flush()
+            try:
+                if not readloop():
+                    break
+            except ValueError:
+                print "Error handling input; please try again."
+            except IndexError:
+                print "Error handling input; please try again."
+
+        self.catchup(needsupdate)
+        
+    def catchup(self, needsupdate = 1):
+        if self.verb:
+            print " *** Processing changes."
+        if needsupdate:
+            self.update()
+        if self.verb:
+            print "Deleting %d files" % len(self.deletedfiles)
+        if isdarcs():
+            for file in util.sorttree(self.deletedfiles, filesfirst = True):
+                self.delfile(file)
+        else:
+            for file in self.deletedfiles:
+                self.delfile(file)
+
+        if self.verb:
+            print "Copying upstream directory to working copy..."
+        util.copyfrom(self.importdir, self.wcobj.wcpath)
+
+        if self.verb:
+            print "Adding %d files" % len(self.addedfiles)
+        self.addedfiles.sort() # Make sure top-level dirs added before files
+        for file in self.addedfiles:
+            self.addfile(file)
+        self.writelog()
+        if self.docommit:
+            self.wcobj.commit()
+
+    def writelog(self):
+        logtext = ""
+        if not (self.importfile is None):
+            importname = self.importfile
+        else:
+            importname = self.importdir
+            
+        if self.summary:
+            summary = self.summary
+        else:
+            summary = "Imported %s" % os.path.basename(importname)
+        logtext += "Imported %s\ninto %s\n\n" % \
+                   (os.path.basename(importname),
+                   self.wcobj.gettreeversion())
+        logtext += self.log
+        self.wcobj.makelog(summary, logtext)
+        
+
+    def addfile(self, file):
+        self.wcobj.addtag(file)
+
+    def delfile(self, file):
+        self.wcobj.deltag(file)
+        self.wcobj.delfile(file)
+    
+        
+    def mv(self, src, dest):
+        print "%s -> %s" % (src, dest)
+        self.wcobj.movefile(src, dest)
+        self.wcobj.movetag(src, dest)
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/thirdparty/vcs-load-dirs/vcs_support/vcs_wc.py	Mon May 18 14:22:45 2009 +0200
@@ -0,0 +1,215 @@
+# Copyright (C) 2003-2007 John Goerzen
+# <jgoerzen@complete.org>
+#
+#    This program is free software; you can redistribute it and/or modify
+#    it under the terms of the GNU General Public License as published by
+#    the Free Software Foundation; either version 2 of the License, or
+#    (at your option) any later version.
+#
+#    This program is distributed in the hope that it will be useful,
+#    but WITHOUT ANY WARRANTY; without even the implied warranty of
+#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+#    GNU General Public License for more details.
+#
+#    You should have received a copy of the GNU General Public License
+#    along with this program; if not, write to the Free Software
+#    Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
+
+import util
+import os.path
+from commandver import cmd, isdarcs, issvk, isgit, ishg, vcscmd
+
+class wc:
+    """Object for a working copy."""
+
+    def __init__(self, wcpath, verbose = 0, fsonly = 0):
+        self.wcpath = os.path.abspath(wcpath)
+        self.verb = verbose
+        self.fsonly = fsonly
+        if not self.wcverify():
+            raise Exception, "%s is not a tla working copy" % self.wcpath
+
+    def gettreeversion(self):
+        if isdarcs():
+            #util.chdircmd(self.wcpath, util.getstdoutsafeexec, "darcs",
+            #              ['check'])
+            return "Darcs repository"
+        elif ishg():
+            return "Mercurial repository"
+        elif issvk():
+            return "Svk repository"
+        elif isgit():
+            return "Git repository"
+        else:
+            return util.chdircmd(self.wcpath, util.getstdoutsafeexec, vcscmd,
+                                 ['tree-version'])[0].strip() 
+
+    def wcverify(self):
+        try:
+            self.gettreeversion()
+        except util.ExecProblem:
+            return 0
+        return 1
+
+    def gettaggingmethod(self):
+        if isdarcs() or isgit() or ishg():
+            return 'explicit'
+        else:
+            return util.chdircmd(self.wcpath, util.getstdoutsafeexec, vcscmd,
+                                 [cmd().tagging_method])[0].strip()
+
+    def gettree(self):
+        return util.maketree(self.wcpath,
+                             ignore = [r'(^(\{arch\}$|,,|\.hg|\.hgtags|\.hgignore|\.git|_darcs|\.arch-ids$|\.arch-inventory$|\+\+)|/\.arch-ids/)'])
+    
+    def addtag(self, file):
+        if self.verb:
+            print "Adding %s" % file
+        if (file[-1] == '/') and \
+           (not os.path.exists(os.path.join(self.wcpath,
+                                            file[:-1]))):
+            try:
+                print "addtag: making dir %s" % file[:-1]
+                os.makedirs(os.path.join(self.wcpath, file[:-1]))
+            except:
+                raise
+        file = self.slashstrip(file)
+        isdir = os.path.isdir(os.path.join(self.wcpath, file))
+        if (not self.fsonly) and \
+               (not ishg()) and ((not isdarcs()) or isdir):
+            # Darcs will see adds later, but we need to add dirs
+            # now so darcs mv will work.
+            #
+            # Mercurial will see adds later, and doesn't track directories,
+            # so we don't do anything with it.
+            util.chdircmd(self.wcpath, util.safeexec, vcscmd,
+                          cmd().add + [file])
+
+    def movetag(self, src, dest):
+        if self.verb:
+            print "Moving %s to %s" % (src, dest)
+        if src[-1] == '/' \
+               and dest[-1] == '/' \
+               and ((not isdarcs()) and (not isgit()) and (not ishg())):
+            # Dir to dir -- darcs mv will catch it already.
+            # Git doesn't do rename recording, so don't worry about it?
+            return
+        src, dest = self.slashstrip(src, dest)
+        if not self.fsonly:
+            util.chdircmd(self.wcpath, util.safeexec, vcscmd,
+                          [cmd().move, src, dest])
+
+    def movefile(self, src, dest):
+        if self.verb:
+            print "Moving file %s to %s" % (src, dest)
+        src, dest = self.slashstrip(src, dest)
+
+        def doit():
+            destdir = os.path.dirname(dest)
+            if (not os.path.exists(destdir)) and destdir != '':
+                self.makedirs(destdir)
+            if self.fsonly or \
+               (not isdarcs() and (not isgit()) and (not ishg())):
+                # Darcs, hg, and git actually do this when they move the tag
+                os.rename(src, dest)
+
+        util.chdircmd(self.wcpath, doit)
+
+    def delfile(self, file):
+        if self.verb:
+            print "Deleting file %s" % file
+        fullfile = os.path.join(self.wcpath, file)
+        if os.path.isfile(fullfile):
+            os.unlink(fullfile)
+        else:
+            util.safeexec("rm", ['-rf', fullfile])
+
+    def deltag(self, file):
+        if (not self.fsonly) and \
+               ((not isdarcs()) and (not ishg())):
+            if self.verb:
+                print "Deleting %s" % file
+            if os.path.islink(os.path.join(self.wcpath,file)) or os.path.exists(os.path.join(self.wcpath, file)):
+                util.chdircmd(self.wcpath, util.safeexec, vcscmd,
+                          cmd().delete + [file])
+
+    def makelog(self, summary, logtext):
+        self.summary = summary
+        self.logtext = logtext
+        if ishg() or isgit() or isdarcs():
+            logfn = self.wcpath + "/../,,vcslog"
+        else:
+            logfn =  util.chdircmd(self.wcpath, util.getstdoutsafeexec, vcscmd,
+                                   ['make-log'])[0].strip()
+
+        self.logfn = os.path.abspath(logfn)
+        
+        fd = open(self.logfn, "w")
+        if isgit():
+            fd.write("%s\n\n" % summary)
+        if ishg():
+            fd.write("%s\n" % summary)
+        elif not (isdarcs() or ishg()):
+            fd.write("Summary: %s\n" % summary)
+            fd.write("Keywords: \n\n")
+        fd.write(logtext)
+        print "LOGTEXT", logtext
+        fd.close()
+
+
+    def commit(self):
+        if self.verb:
+            print "Committing changes"
+        if isdarcs():
+            util.chdircmd(self.wcpath, util.safeexec, vcscmd,
+                          [cmd().commit, "-l", "-a", "-m", self.summary,
+                           "--logfile", self.logfn,
+                           "--delete-logfile"])
+        elif isgit():
+            util.chdircmd(self.wcpath, util.safeexec, vcscmd,
+                          [cmd().commit, "-a", "-F", self.logfn])
+            os.unlink(self.logfn)
+        elif ishg():
+            util.chdircmd(self.wcpath, util.safeexec, vcscmd,
+                          [cmd().commit, "-A", "-l", self.logfn])
+            os.unlink(self.logfn)
+        else:
+            if len(util.chdircmd(self.wcpath, util.getstdoutsafeexec, vcscmd, ['logs']))==0:
+                util.chdircmd(self.wcpath, util.safeexec, vcscmd, [cmd().importcmd])
+            else:
+                util.chdircmd(self.wcpath, util.safeexec, vcscmd, [cmd().commit])
+        
+    def slashstrip(self, *args):
+        retval = []
+        for item in args:
+            if not len(item):
+                retval.append(item)
+            if item[-1] == '/':
+                item = item[:-1]
+            retval.append(item)
+        if len(args) == 1:
+            return retval[0]
+        return retval
+
+
+    def makedirs(self, name, mode=0777):
+        """makedirs(path [, mode=0777])
+
+        Super-mkdir; create a leaf directory and all intermediate ones.
+        Works like mkdir, except that any intermediate path segment (not
+        just the rightmost) will be created if it does not exist.  This is
+        recursive.
+
+        (Modified from Python source)
+
+        """
+        head, tail = os.path.split(name)
+        if not tail:
+            head, tail = os.path.split(head)
+        if head and tail and not os.path.exists(head):
+            self.makedirs(head, mode)
+        if self.verb:
+            print "Created directory", name
+        os.mkdir(name, mode)
+        self.addtag(name)
+