thirdparty/google_appengine/google/appengine/tools/appcfg.py
changeset 2413 d0b7dac5325c
parent 2309 be1b94099f2d
child 2864 2e0b0af889be
2412:c61d96e72e6f 2413:d0b7dac5325c
    46 import google
    46 import google
    47 import yaml
    47 import yaml
    48 from google.appengine.cron import groctimespecification
    48 from google.appengine.cron import groctimespecification
    49 from google.appengine.api import appinfo
    49 from google.appengine.api import appinfo
    50 from google.appengine.api import croninfo
    50 from google.appengine.api import croninfo
       
    51 from google.appengine.api import queueinfo
    51 from google.appengine.api import validation
    52 from google.appengine.api import validation
    52 from google.appengine.api import yaml_errors
    53 from google.appengine.api import yaml_errors
    53 from google.appengine.api import yaml_object
    54 from google.appengine.api import yaml_object
    54 from google.appengine.datastore import datastore_index
    55 from google.appengine.datastore import datastore_index
    55 from google.appengine.tools import appengine_rpc
    56 from google.appengine.tools import appengine_rpc
    56 from google.appengine.tools import bulkloader
    57 from google.appengine.tools import bulkloader
    57 
    58 
    58 
    59 
    59 MAX_FILES_TO_CLONE = 100
    60 MAX_FILES_TO_CLONE = 100
    60 LIST_DELIMITER = "\n"
    61 LIST_DELIMITER = '\n'
    61 TUPLE_DELIMITER = "|"
    62 TUPLE_DELIMITER = '|'
    62 
    63 
    63 VERSION_FILE = "../VERSION"
    64 VERSION_FILE = '../VERSION'
    64 
    65 
    65 UPDATE_CHECK_TIMEOUT = 3
    66 UPDATE_CHECK_TIMEOUT = 3
    66 
    67 
    67 NAG_FILE = ".appcfg_nag"
    68 NAG_FILE = '.appcfg_nag'
    68 
    69 
    69 MAX_LOG_LEVEL = 4
    70 MAX_LOG_LEVEL = 4
    70 
    71 
    71 verbosity = 1
    72 verbosity = 1
    72 
    73 
    73 
    74 
    74 appinfo.AppInfoExternal.ATTRIBUTES[appinfo.RUNTIME] = "python"
    75 appinfo.AppInfoExternal.ATTRIBUTES[appinfo.RUNTIME] = 'python'
    75 _api_versions = os.environ.get('GOOGLE_TEST_API_VERSIONS', '1')
    76 _api_versions = os.environ.get('GOOGLE_TEST_API_VERSIONS', '1')
    76 _options = validation.Options(*_api_versions.split(','))
    77 _options = validation.Options(*_api_versions.split(','))
    77 appinfo.AppInfoExternal.ATTRIBUTES[appinfo.API_VERSION] = _options
    78 appinfo.AppInfoExternal.ATTRIBUTES[appinfo.API_VERSION] = _options
    78 del _api_versions, _options
    79 del _api_versions, _options
    79 
    80 
   104     The mime type string.  For example, 'text/plain' or 'image/gif'.
   105     The mime type string.  For example, 'text/plain' or 'image/gif'.
   105     None if this is not a static file.
   106     None if this is not a static file.
   106   """
   107   """
   107   for handler in config.handlers:
   108   for handler in config.handlers:
   108     handler_type = handler.GetHandlerType()
   109     handler_type = handler.GetHandlerType()
   109     if handler_type in ("static_dir", "static_files"):
   110     if handler_type in ('static_dir', 'static_files'):
   110       if handler_type == "static_dir":
   111       if handler_type == 'static_dir':
   111         regex = os.path.join(re.escape(handler.GetHandler()), ".*")
   112         regex = os.path.join(re.escape(handler.GetHandler()), '.*')
   112       else:
   113       else:
   113         regex = handler.upload
   114         regex = handler.upload
   114       if re.match(regex, filename):
   115       if re.match(regex, filename):
   115         if handler.mime_type is not None:
   116         if handler.mime_type is not None:
   116           return handler.mime_type
   117           return handler.mime_type
   117         else:
   118         else:
   118           guess = mimetypes.guess_type(filename)[0]
   119           guess = mimetypes.guess_type(filename)[0]
   119           if guess is None:
   120           if guess is None:
   120             default = "application/octet-stream"
   121             default = 'application/octet-stream'
   121             print >>sys.stderr, ("Could not guess mimetype for %s.  Using %s."
   122             print >>sys.stderr, ('Could not guess mimetype for %s.  Using %s.'
   122                                  % (filename, default))
   123                                  % (filename, default))
   123             return default
   124             return default
   124           return guess
   125           return guess
   125   return None
   126   return None
   126 
   127 
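As an aside on the handler matching above: a static_dir handler is turned into a prefix regex and, when no explicit mime_type is configured, the type is guessed from the filename. The following standalone sketch mirrors that logic with a made-up handler path and filename (no SDK imports; a POSIX path separator is assumed):

    import mimetypes
    import os.path
    import re

    handler_path = 'static'                  # hypothetical static_dir handler
    regex = os.path.join(re.escape(handler_path), '.*')
    filename = 'static/img/logo.gif'         # hypothetical file in the app

    if re.match(regex, filename):
      # No handler-level mime_type, so fall back to a guess, with the same
      # default used above when the guess fails.
      guess = mimetypes.guess_type(filename)[0] or 'application/octet-stream'
      print guess                             # -> image/gif
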
   151     opt_in: True if the user wants to check for updates on dev_appserver
   152     opt_in: True if the user wants to check for updates on dev_appserver
   152       start.  False if not.  May be None if we have not asked the user yet.
   153       start.  False if not.  May be None if we have not asked the user yet.
   153   """
   154   """
   154 
   155 
   155   ATTRIBUTES = {
   156   ATTRIBUTES = {
   156       "timestamp": validation.TYPE_FLOAT,
   157       'timestamp': validation.TYPE_FLOAT,
   157       "opt_in": validation.Optional(validation.TYPE_BOOL),
   158       'opt_in': validation.Optional(validation.TYPE_BOOL),
   158   }
   159   }
   159 
   160 
   160   @staticmethod
   161   @staticmethod
   161   def Load(nag_file):
   162   def Load(nag_file):
   162     """Load a single NagFile object where one and only one is expected.
   163     """Load a single NagFile object where one and only one is expected.
   181     A Yaml object or None if the VERSION file does not exist.
   182     A Yaml object or None if the VERSION file does not exist.
   182   """
   183   """
   183   version_filename = os.path.join(os.path.dirname(google.__file__),
   184   version_filename = os.path.join(os.path.dirname(google.__file__),
   184                                   VERSION_FILE)
   185                                   VERSION_FILE)
   185   if not isfile(version_filename):
   186   if not isfile(version_filename):
   186     logging.error("Could not find version file at %s", version_filename)
   187     logging.error('Could not find version file at %s', version_filename)
   187     return None
   188     return None
   188 
   189 
   189   version_fh = open_fn(version_filename, "r")
   190   version_fh = open_fn(version_filename, 'r')
   190   try:
   191   try:
   191     version = yaml.safe_load(version_fh)
   192     version = yaml.safe_load(version_fh)
   192   finally:
   193   finally:
   193     version_fh.close()
   194     version_fh.close()
   194 
   195 
   195   return version
   196   return version
   196 
   197 
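For context, GetVersionObject above simply yaml.safe_load()s the SDK's VERSION file. A hedged sketch of the shape of that document, using the field names referenced elsewhere in this file ('release', 'timestamp', 'api_versions'); the values are placeholders:

    import yaml

    sample_version = (
        "release: '1.2.3'\n"          # placeholder release string
        "timestamp: 1243890000\n"     # placeholder build timestamp
        "api_versions: ['1']\n"
    )
    version = yaml.safe_load(sample_version)
    print version['release'], version['api_versions']
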
   197 def RetryWithBackoff(initial_delay, backoff_factor, max_tries, callable):
   198 
   198     """Calls a function multiple times, backing off more and more each time.
   199 def RetryWithBackoff(initial_delay, backoff_factor, max_tries, callable_func):
   199 
   200   """Calls a function multiple times, backing off more and more each time.
   200     Args:
   201 
   201       initial_delay: Initial delay after first try, in seconds.
   202   Args:
   202       backoff_factor: Delay will be multiplied by this factor after each try.
   203     initial_delay: Initial delay after first try, in seconds.
   203       max_tries: Maximum number of tries.
   204     backoff_factor: Delay will be multiplied by this factor after each try.
   204       callable: The method to call, will pass no arguments.
   205     max_tries: Maximum number of tries.
   205 
   206     callable_func: The method to call, will pass no arguments.
   206     Returns:
   207 
    207       True if the function succeeded in one of its tries.
    208   Returns:
    208 
    209     True if the function succeeded in one of its tries.
   209     Raises:
   210 
   210       Whatever the function raises--an exception will immediately stop retries.
   211   Raises:
   211     """
   212     Whatever the function raises--an exception will immediately stop retries.
   212     delay = initial_delay
   213   """
   213     while not callable() and max_tries > 0:
   214   delay = initial_delay
   214       StatusUpdate("Will check again in %s seconds." % delay)
   215   while not callable_func() and max_tries > 0:
   215       time.sleep(delay)
   216     StatusUpdate('Will check again in %s seconds.' % delay)
   216       delay *= backoff_factor
   217     time.sleep(delay)
   217       max_tries -= 1
   218     delay *= backoff_factor
   218     return max_tries > 0
   219     max_tries -= 1
       
   220   return max_tries > 0
   219 
   221 
   220 
   222 
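A minimal usage sketch for RetryWithBackoff as defined above. It assumes the SDK is on sys.path so appcfg is importable; poll() is a stand-in for a real readiness check (Commit() further below passes self.IsReady):

    from google.appengine.tools.appcfg import RetryWithBackoff

    state = {'tries': 0}

    def poll():
      # Stand-in check that succeeds on the third call.
      state['tries'] += 1
      return state['tries'] >= 3

    # Wait 1s, then 2s, doubling each time, for at most 8 tries; stops as
    # soon as poll() returns a true value.
    print RetryWithBackoff(1, 2, 8, poll)   # True
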
   221 class UpdateCheck(object):
   223 class UpdateCheck(object):
   222   """Determines if the local SDK is the latest version.
   224   """Determines if the local SDK is the latest version.
   223 
   225 
   258     self.open = open_fn
   260     self.open = open_fn
   259 
   261 
   260   @staticmethod
   262   @staticmethod
   261   def MakeNagFilename():
   263   def MakeNagFilename():
   262     """Returns the filename for the nag file for this user."""
   264     """Returns the filename for the nag file for this user."""
   263     user_homedir = os.path.expanduser("~/")
   265     user_homedir = os.path.expanduser('~/')
   264     if not os.path.isdir(user_homedir):
   266     if not os.path.isdir(user_homedir):
   265       drive, unused_tail = os.path.splitdrive(os.__file__)
   267       drive, unused_tail = os.path.splitdrive(os.__file__)
   266       if drive:
   268       if drive:
   267         os.environ["HOMEDRIVE"] = drive
   269         os.environ['HOMEDRIVE'] = drive
   268 
   270 
   269     return os.path.expanduser("~/" + NAG_FILE)
   271     return os.path.expanduser('~/' + NAG_FILE)
   270 
   272 
   271   def _ParseVersionFile(self):
   273   def _ParseVersionFile(self):
   272     """Parse the local VERSION file.
   274     """Parse the local VERSION file.
   273 
   275 
   274     Returns:
   276     Returns:
   285     Raises:
   287     Raises:
   286       SystemExit if the api_version is not supported.
   288       SystemExit if the api_version is not supported.
   287     """
   289     """
   288     version = self._ParseVersionFile()
   290     version = self._ParseVersionFile()
   289     if version is None:
   291     if version is None:
   290       logging.error("Could not determine if the SDK supports the api_version "
   292       logging.error('Could not determine if the SDK supports the api_version '
   291                     "requested in app.yaml.")
   293                     'requested in app.yaml.')
   292       return
   294       return
   293     if self.config.api_version not in version["api_versions"]:
   295     if self.config.api_version not in version['api_versions']:
   294       logging.critical("The api_version specified in app.yaml (%s) is not "
   296       logging.critical('The api_version specified in app.yaml (%s) is not '
   295                        "supported by this release of the SDK.  The supported "
   297                        'supported by this release of the SDK.  The supported '
   296                        "api_versions are %s.",
   298                        'api_versions are %s.',
   297                        self.config.api_version, version["api_versions"])
   299                        self.config.api_version, version['api_versions'])
   298       sys.exit(1)
   300       sys.exit(1)
   299 
   301 
   300   def CheckForUpdates(self):
   302   def CheckForUpdates(self):
   301     """Queries the server for updates and nags the user if appropriate.
   303     """Queries the server for updates and nags the user if appropriate.
   302 
   304 
   303     Queries the server for the latest SDK version at the same time reporting
   305     Queries the server for the latest SDK version at the same time reporting
   304     the local SDK version.  The server will respond with a yaml document
   306     the local SDK version.  The server will respond with a yaml document
   305     containing the fields:
   307     containing the fields:
   306       "release": The name of the release (e.g. 1.2).
   308       'release': The name of the release (e.g. 1.2).
   307       "timestamp": The time the release was created (YYYY-MM-DD HH:MM AM/PM TZ).
   309       'timestamp': The time the release was created (YYYY-MM-DD HH:MM AM/PM TZ).
   308       "api_versions": A list of api_version strings (e.g. ['1', 'beta']).
   310       'api_versions': A list of api_version strings (e.g. ['1', 'beta']).
   309 
   311 
   310     We will nag the user with increasing severity if:
   312     We will nag the user with increasing severity if:
   311     - There is a new release.
   313     - There is a new release.
   312     - There is a new release with a new api_version.
   314     - There is a new release with a new api_version.
   313     - There is a new release that does not support the api_version named in
   315     - There is a new release that does not support the api_version named in
   314       self.config.
   316       self.config.
   315     """
   317     """
   316     version = self._ParseVersionFile()
   318     version = self._ParseVersionFile()
   317     if version is None:
   319     if version is None:
   318       logging.info("Skipping update check")
   320       logging.info('Skipping update check')
   319       return
   321       return
   320     logging.info("Checking for updates to the SDK.")
   322     logging.info('Checking for updates to the SDK.')
   321 
   323 
   322     try:
   324     try:
   323       response = self.server.Send("/api/updatecheck",
   325       response = self.server.Send('/api/updatecheck',
   324                                   timeout=UPDATE_CHECK_TIMEOUT,
   326                                   timeout=UPDATE_CHECK_TIMEOUT,
   325                                   release=version["release"],
   327                                   release=version['release'],
   326                                   timestamp=version["timestamp"],
   328                                   timestamp=version['timestamp'],
   327                                   api_versions=version["api_versions"])
   329                                   api_versions=version['api_versions'])
   328     except urllib2.URLError, e:
   330     except urllib2.URLError, e:
   329       logging.info("Update check failed: %s", e)
   331       logging.info('Update check failed: %s', e)
   330       return
   332       return
   331 
   333 
   332     latest = yaml.safe_load(response)
   334     latest = yaml.safe_load(response)
   333     if latest["release"] == version["release"]:
   335     if latest['release'] == version['release']:
   334       logging.info("The SDK is up to date.")
   336       logging.info('The SDK is up to date.')
   335       return
   337       return
   336 
   338 
   337     api_versions = latest["api_versions"]
   339     api_versions = latest['api_versions']
   338     if self.config.api_version not in api_versions:
   340     if self.config.api_version not in api_versions:
   339       self._Nag(
   341       self._Nag(
   340           "The api version you are using (%s) is obsolete!  You should\n"
   342           'The api version you are using (%s) is obsolete!  You should\n'
   341           "upgrade your SDK and test that your code works with the new\n"
   343           'upgrade your SDK and test that your code works with the new\n'
   342           "api version." % self.config.api_version,
   344           'api version.' % self.config.api_version,
   343           latest, version, force=True)
   345           latest, version, force=True)
   344       return
   346       return
   345 
   347 
   346     if self.config.api_version != api_versions[len(api_versions) - 1]:
   348     if self.config.api_version != api_versions[len(api_versions) - 1]:
   347       self._Nag(
   349       self._Nag(
   348           "The api version you are using (%s) is deprecated. You should\n"
   350           'The api version you are using (%s) is deprecated. You should\n'
   349           "upgrade your SDK to try the new functionality." %
   351           'upgrade your SDK to try the new functionality.' %
   350           self.config.api_version, latest, version)
   352           self.config.api_version, latest, version)
   351       return
   353       return
   352 
   354 
   353     self._Nag("There is a new release of the SDK available.",
   355     self._Nag('There is a new release of the SDK available.',
   354               latest, version)
   356               latest, version)
   355 
   357 
   356   def _ParseNagFile(self):
   358   def _ParseNagFile(self):
   357     """Parses the nag file.
   359     """Parses the nag file.
   358 
   360 
   359     Returns:
   361     Returns:
   360       A NagFile if the file was present else None.
   362       A NagFile if the file was present else None.
   361     """
   363     """
   362     nag_filename = UpdateCheck.MakeNagFilename()
   364     nag_filename = UpdateCheck.MakeNagFilename()
   363     if self.isfile(nag_filename):
   365     if self.isfile(nag_filename):
   364       fh = self.open(nag_filename, "r")
   366       fh = self.open(nag_filename, 'r')
   365       try:
   367       try:
   366         nag = NagFile.Load(fh)
   368         nag = NagFile.Load(fh)
   367       finally:
   369       finally:
   368         fh.close()
   370         fh.close()
   369       return nag
   371       return nag
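For reference, the ~/.appcfg_nag file parsed here is a small YAML document carrying the two NagFile ATTRIBUTES declared earlier ('timestamp' and the optional 'opt_in'). A standalone sketch with illustrative values:

    import yaml

    sample_nag = (
        'opt_in: false\n'
        'timestamp: 1243890000.0\n'   # placeholder POSIX timestamp
    )
    nag_data = yaml.safe_load(sample_nag)
    print nag_data['opt_in'], nag_data['timestamp']   # False 1243890000.0
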
   378     Args:
   380     Args:
   379       nag: The NagFile to write.
   381       nag: The NagFile to write.
   380     """
   382     """
   381     nagfilename = UpdateCheck.MakeNagFilename()
   383     nagfilename = UpdateCheck.MakeNagFilename()
   382     try:
   384     try:
   383       fh = self.open(nagfilename, "w")
   385       fh = self.open(nagfilename, 'w')
   384       try:
   386       try:
   385         fh.write(nag.ToYAML())
   387         fh.write(nag.ToYAML())
   386       finally:
   388       finally:
   387         fh.close()
   389         fh.close()
   388     except (OSError, IOError), e:
   390     except (OSError, IOError), e:
   389       logging.error("Could not write nag file to %s. Error: %s", nagfilename, e)
   391       logging.error('Could not write nag file to %s. Error: %s', nagfilename, e)
   390 
   392 
   391   def _Nag(self, msg, latest, version, force=False):
   393   def _Nag(self, msg, latest, version, force=False):
   392     """Prints a nag message and updates the nag file's timestamp.
   394     """Prints a nag message and updates the nag file's timestamp.
   393 
   395 
    394     Because we don't want to nag the user every time, we store a simple
    396     Because we don't want to nag the user every time, we store a simple
   404     """
   406     """
   405     nag = self._ParseNagFile()
   407     nag = self._ParseNagFile()
   406     if nag and not force:
   408     if nag and not force:
   407       last_nag = datetime.datetime.fromtimestamp(nag.timestamp)
   409       last_nag = datetime.datetime.fromtimestamp(nag.timestamp)
   408       if datetime.datetime.now() - last_nag < datetime.timedelta(weeks=1):
   410       if datetime.datetime.now() - last_nag < datetime.timedelta(weeks=1):
   409         logging.debug("Skipping nag message")
   411         logging.debug('Skipping nag message')
   410         return
   412         return
   411 
   413 
   412     if nag is None:
   414     if nag is None:
   413       nag = NagFile()
   415       nag = NagFile()
   414     nag.timestamp = time.time()
   416     nag.timestamp = time.time()
   415     self._WriteNagFile(nag)
   417     self._WriteNagFile(nag)
   416 
   418 
   417     print "****************************************************************"
   419     print '****************************************************************'
   418     print msg
   420     print msg
   419     print "-----------"
   421     print '-----------'
   420     print "Latest SDK:"
   422     print 'Latest SDK:'
   421     print yaml.dump(latest)
   423     print yaml.dump(latest)
   422     print "-----------"
   424     print '-----------'
   423     print "Your SDK:"
   425     print 'Your SDK:'
   424     print yaml.dump(version)
   426     print yaml.dump(version)
   425     print "-----------"
   427     print '-----------'
   426     print "Please visit http://code.google.com/appengine for the latest SDK"
   428     print 'Please visit http://code.google.com/appengine for the latest SDK'
   427     print "****************************************************************"
   429     print '****************************************************************'
   428 
   430 
   429   def AllowedToCheckForUpdates(self, input_fn=raw_input):
   431   def AllowedToCheckForUpdates(self, input_fn=raw_input):
   430     """Determines if the user wants to check for updates.
   432     """Determines if the user wants to check for updates.
   431 
   433 
   432     On startup, the dev_appserver wants to check for updates to the SDK.
   434     On startup, the dev_appserver wants to check for updates to the SDK.
   448     if nag is None:
   450     if nag is None:
   449       nag = NagFile()
   451       nag = NagFile()
   450       nag.timestamp = time.time()
   452       nag.timestamp = time.time()
   451 
   453 
   452     if nag.opt_in is None:
   454     if nag.opt_in is None:
   453       answer = input_fn("Allow dev_appserver to check for updates on startup? "
   455       answer = input_fn('Allow dev_appserver to check for updates on startup? '
   454                         "(Y/n): ")
   456                         '(Y/n): ')
   455       answer = answer.strip().lower()
   457       answer = answer.strip().lower()
   456       if answer == "n" or answer == "no":
   458       if answer == 'n' or answer == 'no':
   457         print ("dev_appserver will not check for updates on startup.  To "
   459         print ('dev_appserver will not check for updates on startup.  To '
   458                "change this setting, edit %s" % UpdateCheck.MakeNagFilename())
   460                'change this setting, edit %s' % UpdateCheck.MakeNagFilename())
   459         nag.opt_in = False
   461         nag.opt_in = False
   460       else:
   462       else:
   461         print ("dev_appserver will check for updates on startup.  To change "
   463         print ('dev_appserver will check for updates on startup.  To change '
   462                "this setting, edit %s" % UpdateCheck.MakeNagFilename())
   464                'this setting, edit %s' % UpdateCheck.MakeNagFilename())
   463         nag.opt_in = True
   465         nag.opt_in = True
   464       self._WriteNagFile(nag)
   466       self._WriteNagFile(nag)
   465     return nag.opt_in
   467     return nag.opt_in
   466 
   468 
   467 
   469 
   481     self.config = config
   483     self.config = config
   482     self.definitions = definitions
   484     self.definitions = definitions
   483 
   485 
   484   def DoUpload(self):
   486   def DoUpload(self):
   485     """Uploads the index definitions."""
   487     """Uploads the index definitions."""
   486     StatusUpdate("Uploading index definitions.")
   488     StatusUpdate('Uploading index definitions.')
   487     self.server.Send("/api/datastore/index/add",
   489     self.server.Send('/api/datastore/index/add',
   488                      app_id=self.config.application,
   490                      app_id=self.config.application,
   489                      version=self.config.version,
   491                      version=self.config.version,
   490                      payload=self.definitions.ToYAML())
   492                      payload=self.definitions.ToYAML())
   491 
   493 
   492 
   494 
   506     self.config = config
   508     self.config = config
   507     self.cron = cron
   509     self.cron = cron
   508 
   510 
   509   def DoUpload(self):
   511   def DoUpload(self):
   510     """Uploads the cron entries."""
   512     """Uploads the cron entries."""
   511     StatusUpdate("Uploading cron entries.")
   513     StatusUpdate('Uploading cron entries.')
   512     self.server.Send("/api/datastore/cron/update",
   514     self.server.Send('/api/datastore/cron/update',
   513                      app_id=self.config.application,
   515                      app_id=self.config.application,
   514                      version=self.config.version,
   516                      version=self.config.version,
   515                      payload=self.cron.ToYAML())
   517                      payload=self.cron.ToYAML())
       
   518 
       
   519 
       
   520 class QueueEntryUpload(object):
       
   521   """Provides facilities to upload task queue entries to the hosting service."""
       
   522 
       
   523   def __init__(self, server, config, queue):
       
   524     """Creates a new QueueEntryUpload.
       
   525 
       
   526     Args:
       
   527       server: The RPC server to use.  Should be an instance of a subclass of
       
    528         AbstractRpcServer.
       
   529       config: The AppInfoExternal object derived from the app.yaml file.
       
   530       queue: The QueueInfoExternal object loaded from the queue.yaml file.
       
   531     """
       
   532     self.server = server
       
   533     self.config = config
       
   534     self.queue = queue
       
   535 
       
   536   def DoUpload(self):
       
   537     """Uploads the task queue entries."""
       
   538     StatusUpdate('Uploading task queue entries.')
       
   539     self.server.Send('/api/queue/update',
       
   540                      app_id=self.config.application,
       
   541                      version=self.config.version,
       
   542                      payload=self.queue.ToYAML())
   516 
   543 
   517 
   544 
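The new QueueEntryUpload above follows the same pattern as the index and cron uploaders: a single RPC whose payload is the YAML of the parsed config file. A hedged sketch of that call shape using stub objects (the stub classes and their values are invented for illustration and stand in for the real RPC server, AppInfoExternal and QueueInfoExternal objects):

    class StubServer(object):
      """Stand-in for an AbstractRpcServer subclass."""
      def Send(self, url, **kwds):
        print url, sorted(kwds)
        return ''

    class StubConfig(object):
      application = 'example-app'   # placeholder app id
      version = '1'

    class StubQueue(object):
      def ToYAML(self):
        return 'queue:\n- name: default\n  rate: 5/s\n'

    # Mirrors QueueEntryUpload.DoUpload():
    StubServer().Send('/api/queue/update',
                      app_id=StubConfig.application,
                      version=StubConfig.version,
                      payload=StubQueue().ToYAML())
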
   518 class IndexOperation(object):
   545 class IndexOperation(object):
   519   """Provide facilities for writing Index operation commands."""
   546   """Provide facilities for writing Index operation commands."""
   520 
   547 
   541       is the set of indexes that are present in the index.yaml file but missing
   568       is the set of indexes that are present in the index.yaml file but missing
   542       from the server.  The second record is the set of indexes that are
   569       from the server.  The second record is the set of indexes that are
   543       present on the server but missing from the index.yaml file (indicating
   570       present on the server but missing from the index.yaml file (indicating
   544       that these indexes should probably be vacuumed).
   571       that these indexes should probably be vacuumed).
   545     """
   572     """
   546     StatusUpdate("Fetching index definitions diff.")
   573     StatusUpdate('Fetching index definitions diff.')
   547     response = self.server.Send("/api/datastore/index/diff",
   574     response = self.server.Send('/api/datastore/index/diff',
   548                                 app_id=self.config.application,
   575                                 app_id=self.config.application,
   549                                 payload=definitions.ToYAML())
   576                                 payload=definitions.ToYAML())
   550     return datastore_index.ParseMultipleIndexDefinitions(response)
   577     return datastore_index.ParseMultipleIndexDefinitions(response)
   551 
   578 
   552   def DoDelete(self, definitions):
   579   def DoDelete(self, definitions):
    559       A single datastore_index.IndexDefinitions containing indexes that were
    586       A single datastore_index.IndexDefinitions containing indexes that were
   560       not deleted, probably because they were already removed.  This may
   587       not deleted, probably because they were already removed.  This may
   561       be normal behavior as there is a potential race condition between fetching
   588       be normal behavior as there is a potential race condition between fetching
   562       the index-diff and sending deletion confirmation through.
   589       the index-diff and sending deletion confirmation through.
   563     """
   590     """
   564     StatusUpdate("Deleting selected index definitions.")
   591     StatusUpdate('Deleting selected index definitions.')
   565     response = self.server.Send("/api/datastore/index/delete",
   592     response = self.server.Send('/api/datastore/index/delete',
   566                                 app_id=self.config.application,
   593                                 app_id=self.config.application,
   567                                 payload=definitions.ToYAML())
   594                                 payload=definitions.ToYAML())
   568     return datastore_index.ParseIndexDefinitions(response)
   595     return datastore_index.ParseIndexDefinitions(response)
   569 
   596 
   570 
   597 
   606 
   633 
   607     Returns:
   634     Returns:
    608       True if user enters 'y' or 'a'.  False if user enters 'n'.
    635       True if user enters 'y' or 'a'.  False if user enters 'n'.
   609     """
   636     """
   610     while True:
   637     while True:
   611       print "This index is no longer defined in your index.yaml file."
   638       print 'This index is no longer defined in your index.yaml file.'
   612       print
   639       print
   613       print index.ToYAML()
   640       print index.ToYAML()
   614       print
   641       print
   615 
   642 
   616       confirmation = self.confirmation_fn(
   643       confirmation = self.confirmation_fn(
   617           "Are you sure you want to delete this index? (N/y/a): ")
   644           'Are you sure you want to delete this index? (N/y/a): ')
   618       confirmation = confirmation.strip().lower()
   645       confirmation = confirmation.strip().lower()
   619 
   646 
   620       if confirmation == "y":
   647       if confirmation == 'y':
   621         return True
   648         return True
   622       elif confirmation == "n" or not confirmation:
   649       elif confirmation == 'n' or not confirmation:
   623         return False
   650         return False
   624       elif confirmation == "a":
   651       elif confirmation == 'a':
   625         self.force = True
   652         self.force = True
   626         return True
   653         return True
   627       else:
   654       else:
   628         print "Did not understand your response."
   655         print 'Did not understand your response.'
   629 
   656 
   630   def DoVacuum(self, definitions):
   657   def DoVacuum(self, definitions):
   631     """Vacuum indexes in datastore.
   658     """Vacuum indexes in datastore.
   632 
   659 
   633     This method will query the server to determine which indexes are not
   660     This method will query the server to determine which indexes are not
   657       not_deleted = self.DoDelete(deletions)
   684       not_deleted = self.DoDelete(deletions)
   658 
   685 
   659       if not_deleted.indexes:
   686       if not_deleted.indexes:
   660         not_deleted_count = len(not_deleted.indexes)
   687         not_deleted_count = len(not_deleted.indexes)
   661         if not_deleted_count == 1:
   688         if not_deleted_count == 1:
   662           warning_message = ("An index was not deleted.  Most likely this is "
   689           warning_message = ('An index was not deleted.  Most likely this is '
   663                              "because it no longer exists.\n\n")
   690                              'because it no longer exists.\n\n')
   664         else:
   691         else:
   665           warning_message = ("%d indexes were not deleted.  Most likely this "
   692           warning_message = ('%d indexes were not deleted.  Most likely this '
   666                              "is because they no longer exist.\n\n"
   693                              'is because they no longer exist.\n\n'
   667                              % not_deleted_count)
   694                              % not_deleted_count)
   668         for index in not_deleted.indexes:
   695         for index in not_deleted.indexes:
   669           warning_message += index.ToYAML()
   696           warning_message += index.ToYAML()
   670         logging.warning(warning_message)
   697         logging.warning(warning_message)
   671 
   698 
   672 
   699 
   673 class LogsRequester(object):
   700 class LogsRequester(object):
   674   """Provide facilities to export request logs."""
   701   """Provide facilities to export request logs."""
   675 
   702 
   676   def __init__(self, server, config, output_file,
   703   def __init__(self, server, config, output_file,
   677                num_days, append, severity, now, vhost):
   704                num_days, append, severity, now, vhost, include_vhost):
   678     """Constructor.
   705     """Constructor.
   679 
   706 
   680     Args:
   707     Args:
   681       server: The RPC server to use.  Should be an instance of HttpRpcServer
   708       server: The RPC server to use.  Should be an instance of HttpRpcServer
   682         or TestRpcServer.
   709         or TestRpcServer.
   685       num_days: Number of days worth of logs to export; 0 for all available.
   712       num_days: Number of days worth of logs to export; 0 for all available.
   686       append: True if appending to an existing file.
   713       append: True if appending to an existing file.
   687       severity: App log severity to request (0-4); None for no app logs.
   714       severity: App log severity to request (0-4); None for no app logs.
   688       now: POSIX timestamp used for calculating valid dates for num_days.
   715       now: POSIX timestamp used for calculating valid dates for num_days.
   689       vhost: The virtual host of log messages to get. None for all hosts.
   716       vhost: The virtual host of log messages to get. None for all hosts.
       
   717       include_vhost: If true, the virtual host is included in log messages.
   690     """
   718     """
   691     self.server = server
   719     self.server = server
   692     self.config = config
   720     self.config = config
   693     self.output_file = output_file
   721     self.output_file = output_file
   694     self.append = append
   722     self.append = append
   695     self.num_days = num_days
   723     self.num_days = num_days
   696     self.severity = severity
   724     self.severity = severity
   697     self.vhost = vhost
   725     self.vhost = vhost
   698     self.version_id = self.config.version + ".1"
   726     self.include_vhost = include_vhost
       
   727     self.version_id = self.config.version + '.1'
   699     self.sentinel = None
   728     self.sentinel = None
   700     self.write_mode = "w"
   729     self.write_mode = 'w'
   701     if self.append:
   730     if self.append:
   702       self.sentinel = FindSentinel(self.output_file)
   731       self.sentinel = FindSentinel(self.output_file)
   703       self.write_mode = "a"
   732       self.write_mode = 'a'
   704     self.valid_dates = None
   733     self.valid_dates = None
   705     if self.num_days:
   734     if self.num_days:
   706       patterns = []
   735       patterns = []
   707       now = PacificTime(now)
   736       now = PacificTime(now)
   708       for i in xrange(self.num_days):
   737       for i in xrange(self.num_days):
   709         then = time.gmtime(now - 24*3600 * i)
   738         then = time.gmtime(now - 24*3600 * i)
   710         patterns.append(re.escape(time.strftime("%d/%m/%Y", then)))
   739         patterns.append(re.escape(time.strftime('%d/%m/%Y', then)))
   711         patterns.append(re.escape(time.strftime("%d/%b/%Y", then)))
   740         patterns.append(re.escape(time.strftime('%d/%b/%Y', then)))
   712       self.valid_dates = re.compile(r"[^[]+\[(" + "|".join(patterns) + r"):")
   741       self.valid_dates = re.compile(r'[^[]+\[(' + '|'.join(patterns) + r'):')
   713 
   742 
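The date filter built just above is easier to see with concrete values. This standalone sketch rebuilds the same regex for num_days=2 from a fixed timestamp and matches it against an Apache-style request-log line (it assumes an English locale so %b yields 'Jan'):

    import re
    import time

    now = 1262304000                  # arbitrary POSIX timestamp (2010-01-01 UTC)
    patterns = []
    for i in xrange(2):               # num_days = 2
      then = time.gmtime(now - 24*3600 * i)
      patterns.append(re.escape(time.strftime('%d/%m/%Y', then)))
      patterns.append(re.escape(time.strftime('%d/%b/%Y', then)))
    valid_dates = re.compile(r'[^[]+\[(' + '|'.join(patterns) + r'):')

    line = '1.2.3.4 - - [01/Jan/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 -'
    print bool(valid_dates.match(line))   # True
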
   714   def DownloadLogs(self):
   743   def DownloadLogs(self):
   715     """Download the requested logs.
   744     """Download the requested logs.
   716 
   745 
   717     This will write the logs to the file designated by
   746     This will write the logs to the file designated by
   718     self.output_file, or to stdout if the filename is '-'.
   747     self.output_file, or to stdout if the filename is '-'.
   719     Multiple roundtrips to the server may be made.
   748     Multiple roundtrips to the server may be made.
   720     """
   749     """
   721     StatusUpdate("Downloading request logs for %s %s." %
   750     StatusUpdate('Downloading request logs for %s %s.' %
   722                  (self.config.application, self.version_id))
   751                  (self.config.application, self.version_id))
   723     tf = tempfile.TemporaryFile()
   752     tf = tempfile.TemporaryFile()
   724     offset = None
   753     offset = None
   725     try:
   754     try:
   726       while True:
   755       while True:
   727         try:
   756         try:
   728           offset = self.RequestLogLines(tf, offset)
   757           offset = self.RequestLogLines(tf, offset)
   729           if not offset:
   758           if not offset:
   730             break
   759             break
   731         except KeyboardInterrupt:
   760         except KeyboardInterrupt:
   732           StatusUpdate("Keyboard interrupt; saving data downloaded so far.")
   761           StatusUpdate('Keyboard interrupt; saving data downloaded so far.')
   733           break
   762           break
   734       StatusUpdate("Copying request logs to %r." % self.output_file)
   763       StatusUpdate('Copying request logs to %r.' % self.output_file)
   735       if self.output_file == "-":
   764       if self.output_file == '-':
   736         of = sys.stdout
   765         of = sys.stdout
   737       else:
   766       else:
   738         try:
   767         try:
   739           of = open(self.output_file, self.write_mode)
   768           of = open(self.output_file, self.write_mode)
   740         except IOError, err:
   769         except IOError, err:
   741           StatusUpdate("Can't write %r: %s." % (self.output_file, err))
   770           StatusUpdate('Can\'t write %r: %s.' % (self.output_file, err))
   742           sys.exit(1)
   771           sys.exit(1)
   743       try:
   772       try:
   744         line_count = CopyReversedLines(tf, of)
   773         line_count = CopyReversedLines(tf, of)
   745       finally:
   774       finally:
   746         of.flush()
   775         of.flush()
   747         if of is not sys.stdout:
   776         if of is not sys.stdout:
   748           of.close()
   777           of.close()
   749     finally:
   778     finally:
   750       tf.close()
   779       tf.close()
   751     StatusUpdate("Copied %d records." % line_count)
   780     StatusUpdate('Copied %d records.' % line_count)
   752 
   781 
   753   def RequestLogLines(self, tf, offset):
   782   def RequestLogLines(self, tf, offset):
   754     """Make a single roundtrip to the server.
   783     """Make a single roundtrip to the server.
   755 
   784 
   756     Args:
   785     Args:
   761 
   790 
   762     Returns:
   791     Returns:
   763       The offset string to be used for the next request, if another
   792       The offset string to be used for the next request, if another
   764       request should be issued; or None, if not.
   793       request should be issued; or None, if not.
   765     """
   794     """
   766     logging.info("Request with offset %r.", offset)
   795     logging.info('Request with offset %r.', offset)
   767     kwds = {"app_id": self.config.application,
   796     kwds = {'app_id': self.config.application,
   768             "version": self.version_id,
   797             'version': self.version_id,
   769             "limit": 100,
   798             'limit': 100,
   770            }
   799            }
   771     if offset:
   800     if offset:
   772       kwds["offset"] = offset
   801       kwds['offset'] = offset
   773     if self.severity is not None:
   802     if self.severity is not None:
   774       kwds["severity"] = str(self.severity)
   803       kwds['severity'] = str(self.severity)
   775     if self.vhost is not None:
   804     if self.vhost is not None:
   776       kwds["vhost"] = str(self.vhost)
   805       kwds['vhost'] = str(self.vhost)
   777     response = self.server.Send("/api/request_logs", payload=None, **kwds)
   806     if self.include_vhost is not None:
   778     response = response.replace("\r", "\0")
   807       kwds['include_vhost'] = str(self.include_vhost)
       
   808     response = self.server.Send('/api/request_logs', payload=None, **kwds)
       
   809     response = response.replace('\r', '\0')
   779     lines = response.splitlines()
   810     lines = response.splitlines()
   780     logging.info("Received %d bytes, %d records.", len(response), len(lines))
   811     logging.info('Received %d bytes, %d records.', len(response), len(lines))
   781     offset = None
   812     offset = None
   782     if lines and lines[0].startswith("#"):
   813     if lines and lines[0].startswith('#'):
   783       match = re.match(r"^#\s*next_offset=(\S+)\s*$", lines[0])
   814       match = re.match(r'^#\s*next_offset=(\S+)\s*$', lines[0])
   784       del lines[0]
   815       del lines[0]
   785       if match:
   816       if match:
   786         offset = match.group(1)
   817         offset = match.group(1)
   787     if lines and lines[-1].startswith("#"):
   818     if lines and lines[-1].startswith('#'):
   788       del lines[-1]
   819       del lines[-1]
   789     valid_dates = self.valid_dates
   820     valid_dates = self.valid_dates
   790     sentinel = self.sentinel
   821     sentinel = self.sentinel
   791     len_sentinel = None
   822     len_sentinel = None
   792     if sentinel:
   823     if sentinel:
   793       len_sentinel = len(sentinel)
   824       len_sentinel = len(sentinel)
   794     for line in lines:
   825     for line in lines:
   795       if ((sentinel and
   826       if ((sentinel and
   796            line.startswith(sentinel) and
   827            line.startswith(sentinel) and
   797            line[len_sentinel : len_sentinel+1] in ("", "\0")) or
   828            line[len_sentinel : len_sentinel+1] in ('', '\0')) or
   798           (valid_dates and not valid_dates.match(line))):
   829           (valid_dates and not valid_dates.match(line))):
   799         return None
   830         return None
   800       tf.write(line + "\n")
   831       tf.write(line + '\n')
   801     if not lines:
   832     if not lines:
   802       return None
   833       return None
   803     return offset
   834     return offset
   804 
   835 
   805 
   836 
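For clarity, a tiny standalone sketch of the response framing that RequestLogLines handles above: an optional leading '# next_offset=...' comment, the log records themselves, and an optional trailing comment line. The sample response text is invented:

    import re

    response = ('# next_offset=abc123\n'
                '1.2.3.4 - - [01/Jan/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 -\n'
                '# end\n')
    lines = response.splitlines()
    offset = None
    if lines and lines[0].startswith('#'):
      match = re.match(r'^#\s*next_offset=(\S+)\s*$', lines[0])
      del lines[0]
      if match:
        offset = match.group(1)
    if lines and lines[-1].startswith('#'):
      del lines[-1]
    print offset, len(lines)   # abc123 1
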
   859 
   890 
   860 def CopyReversedLines(instream, outstream, blocksize=2**16):
   891 def CopyReversedLines(instream, outstream, blocksize=2**16):
   861   r"""Copy lines from input stream to output stream in reverse order.
   892   r"""Copy lines from input stream to output stream in reverse order.
   862 
   893 
   863   As a special feature, null bytes in the input are turned into
   894   As a special feature, null bytes in the input are turned into
   864   newlines followed by tabs in the output, but these "sub-lines"
   895   newlines followed by tabs in the output, but these 'sub-lines'
   865   separated by null bytes are not reversed.  E.g. If the input is
   896   separated by null bytes are not reversed.  E.g. If the input is
   866   "A\0B\nC\0D\n", the output is "C\n\tD\nA\n\tB\n".
   897   'A\0B\nC\0D\n', the output is 'C\n\tD\nA\n\tB\n'.
   867 
   898 
   868   Args:
   899   Args:
   869     instream: A seekable stream open for reading in binary mode.
   900     instream: A seekable stream open for reading in binary mode.
   870     outstream: A stream open for writing; doesn't have to be seekable or binary.
   901     outstream: A stream open for writing; doesn't have to be seekable or binary.
   871     blocksize: Optional block size for buffering, for unit testing.
   902     blocksize: Optional block size for buffering, for unit testing.
   874     The number of lines copied.
   905     The number of lines copied.
   875   """
   906   """
   876   line_count = 0
   907   line_count = 0
   877   instream.seek(0, 2)
   908   instream.seek(0, 2)
   878   last_block = instream.tell() // blocksize
   909   last_block = instream.tell() // blocksize
   879   spillover = ""
   910   spillover = ''
   880   for iblock in xrange(last_block + 1, -1, -1):
   911   for iblock in xrange(last_block + 1, -1, -1):
   881     instream.seek(iblock * blocksize)
   912     instream.seek(iblock * blocksize)
   882     data = instream.read(blocksize)
   913     data = instream.read(blocksize)
   883     lines = data.splitlines(True)
   914     lines = data.splitlines(True)
   884     lines[-1:] = "".join(lines[-1:] + [spillover]).splitlines(True)
   915     lines[-1:] = ''.join(lines[-1:] + [spillover]).splitlines(True)
   885     if lines and not lines[-1].endswith("\n"):
   916     if lines and not lines[-1].endswith('\n'):
   886       lines[-1] += "\n"
   917       lines[-1] += '\n'
   887     lines.reverse()
   918     lines.reverse()
   888     if lines and iblock > 0:
   919     if lines and iblock > 0:
   889       spillover = lines.pop()
   920       spillover = lines.pop()
   890     if lines:
   921     if lines:
   891       line_count += len(lines)
   922       line_count += len(lines)
   892       data = "".join(lines).replace("\0", "\n\t")
   923       data = ''.join(lines).replace('\0', '\n\t')
   893       outstream.write(data)
   924       outstream.write(data)
   894   return line_count
   925   return line_count
   895 
   926 
   896 
   927 
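The null-byte behaviour documented above is easiest to see by running the function on the docstring's own example. This assumes the SDK is on sys.path so CopyReversedLines can be imported from this module:

    from StringIO import StringIO

    from google.appengine.tools.appcfg import CopyReversedLines

    instream = StringIO('A\0B\nC\0D\n')
    outstream = StringIO()
    print CopyReversedLines(instream, outstream)   # 2
    print repr(outstream.getvalue())               # 'C\n\tD\nA\n\tB\n'
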
   897 def FindSentinel(filename, blocksize=2**16):
   928 def FindSentinel(filename, blocksize=2**16):
   905     The contents of the last line in the file that doesn't start with
   936     The contents of the last line in the file that doesn't start with
   906     a tab, with its trailing newline stripped; or None if the file
   937     a tab, with its trailing newline stripped; or None if the file
   907     couldn't be opened or no such line could be found by inspecting
   938     couldn't be opened or no such line could be found by inspecting
   908     the last 'blocksize' bytes of the file.
   939     the last 'blocksize' bytes of the file.
   909   """
   940   """
   910   if filename == "-":
   941   if filename == '-':
   911     StatusUpdate("Can't combine --append with output to stdout.")
   942     StatusUpdate('Can\'t combine --append with output to stdout.')
   912     sys.exit(2)
   943     sys.exit(2)
   913   try:
   944   try:
   914     fp = open(filename, "rb")
   945     fp = open(filename, 'rb')
   915   except IOError, err:
   946   except IOError, err:
   916     StatusUpdate("Append mode disabled: can't read %r: %s." % (filename, err))
   947     StatusUpdate('Append mode disabled: can\'t read %r: %s.' % (filename, err))
   917     return None
   948     return None
   918   try:
   949   try:
   919     fp.seek(0, 2)
   950     fp.seek(0, 2)
   920     fp.seek(max(0, fp.tell() - blocksize))
   951     fp.seek(max(0, fp.tell() - blocksize))
   921     lines = fp.readlines()
   952     lines = fp.readlines()
   922     del lines[:1]
   953     del lines[:1]
   923     sentinel = None
   954     sentinel = None
   924     for line in lines:
   955     for line in lines:
   925       if not line.startswith("\t"):
   956       if not line.startswith('\t'):
   926         sentinel = line
   957         sentinel = line
   927     if not sentinel:
   958     if not sentinel:
   928       StatusUpdate("Append mode disabled: can't find sentinel in %r." %
   959       StatusUpdate('Append mode disabled: can\'t find sentinel in %r.' %
   929                    filename)
   960                    filename)
   930       return None
   961       return None
   931     return sentinel.rstrip("\n")
   962     return sentinel.rstrip('\n')
   932   finally:
   963   finally:
   933     fp.close()
   964     fp.close()
   934 
   965 
   935 
   966 
   936 class AppVersionUpload(object):
   967 class AppVersionUpload(object):
   973 
  1004 
   974     Returns:
  1005     Returns:
   975       The string representation of the hash.
  1006       The string representation of the hash.
   976     """
  1007     """
   977     h = sha.new(content).hexdigest()
  1008     h = sha.new(content).hexdigest()
   978     return "%s_%s_%s_%s_%s" % (h[0:8], h[8:16], h[16:24], h[24:32], h[32:40])
  1009     return '%s_%s_%s_%s_%s' % (h[0:8], h[8:16], h[16:24], h[24:32], h[32:40])
   979 
  1010 
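A quick standalone sketch of the hash format produced above: a SHA-1 hex digest split into five underscore-separated 8-character groups. The payload is arbitrary, and hashlib is used here in place of the deprecated sha module:

    import hashlib

    content = 'example file contents'
    h = hashlib.sha1(content).hexdigest()
    print '%s_%s_%s_%s_%s' % (h[0:8], h[8:16], h[16:24], h[24:32], h[32:40])
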
   980   def AddFile(self, path, file_handle):
  1011   def AddFile(self, path, file_handle):
   981     """Adds the provided file to the list to be pushed to the server.
  1012     """Adds the provided file to the list to be pushed to the server.
   982 
  1013 
   983     Args:
  1014     Args:
   984       path: The path the file should be uploaded as.
  1015       path: The path the file should be uploaded as.
   985       file_handle: A stream containing data to upload.
  1016       file_handle: A stream containing data to upload.
   986     """
  1017     """
   987     assert not self.in_transaction, "Already in a transaction."
  1018     assert not self.in_transaction, 'Already in a transaction.'
   988     assert file_handle is not None
  1019     assert file_handle is not None
   989 
  1020 
   990     reason = appinfo.ValidFilename(path)
  1021     reason = appinfo.ValidFilename(path)
   991     if reason:
  1022     if reason:
   992       logging.error(reason)
  1023       logging.error(reason)
  1005 
  1036 
  1006     Returns:
  1037     Returns:
  1007       A list of pathnames for files that should be uploaded using UploadFile()
  1038       A list of pathnames for files that should be uploaded using UploadFile()
  1008       before Commit() can be called.
  1039       before Commit() can be called.
  1009     """
  1040     """
  1010     assert not self.in_transaction, "Already in a transaction."
  1041     assert not self.in_transaction, 'Already in a transaction.'
  1011 
  1042 
  1012     StatusUpdate("Initiating update.")
  1043     StatusUpdate('Initiating update.')
  1013     self.server.Send("/api/appversion/create", app_id=self.app_id,
  1044     self.server.Send('/api/appversion/create', app_id=self.app_id,
  1014                      version=self.version, payload=self.config.ToYAML())
  1045                      version=self.version, payload=self.config.ToYAML())
  1015     self.in_transaction = True
  1046     self.in_transaction = True
  1016 
  1047 
  1017     files_to_clone = []
  1048     files_to_clone = []
  1018     blobs_to_clone = []
  1049     blobs_to_clone = []
  1034         file_type: the type of the files
  1065         file_type: the type of the files
  1035       """
  1066       """
  1036       if not files:
  1067       if not files:
  1037         return
  1068         return
  1038 
  1069 
  1039       StatusUpdate("Cloning %d %s file%s." %
  1070       StatusUpdate('Cloning %d %s file%s.' %
  1040                    (len(files), file_type, len(files) != 1 and "s" or ""))
  1071                    (len(files), file_type, len(files) != 1 and 's' or ''))
  1041       for i in xrange(0, len(files), MAX_FILES_TO_CLONE):
  1072       for i in xrange(0, len(files), MAX_FILES_TO_CLONE):
  1042         if i > 0 and i % MAX_FILES_TO_CLONE == 0:
  1073         if i > 0 and i % MAX_FILES_TO_CLONE == 0:
  1043           StatusUpdate("Cloned %d files." % i)
  1074           StatusUpdate('Cloned %d files.' % i)
  1044 
  1075 
  1045         chunk = files[i:min(len(files), i + MAX_FILES_TO_CLONE)]
  1076         chunk = files[i:min(len(files), i + MAX_FILES_TO_CLONE)]
  1046         result = self.server.Send(url,
  1077         result = self.server.Send(url,
  1047                                   app_id=self.app_id, version=self.version,
  1078                                   app_id=self.app_id, version=self.version,
  1048                                   payload=BuildClonePostBody(chunk))
  1079                                   payload=BuildClonePostBody(chunk))
  1049         if result:
  1080         if result:
  1050           files_to_upload.update(dict(
  1081           files_to_upload.update(dict(
  1051               (f, self.files[f]) for f in result.split(LIST_DELIMITER)))
  1082               (f, self.files[f]) for f in result.split(LIST_DELIMITER)))
  1052 
  1083 
  1053     CloneFiles("/api/appversion/cloneblobs", blobs_to_clone, "static")
  1084     CloneFiles('/api/appversion/cloneblobs', blobs_to_clone, 'static')
  1054     CloneFiles("/api/appversion/clonefiles", files_to_clone, "application")
  1085     CloneFiles('/api/appversion/clonefiles', files_to_clone, 'application')
  1055 
  1086 
  1056     logging.info("Files to upload: " + str(files_to_upload))
  1087     logging.info('Files to upload: ' + str(files_to_upload))
  1057 
  1088 
  1058     self.files = files_to_upload
  1089     self.files = files_to_upload
  1059     return sorted(files_to_upload.iterkeys())
  1090     return sorted(files_to_upload.iterkeys())
  1060 
  1091 
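The cloning requests above are batched MAX_FILES_TO_CLONE at a time; the slicing reduces to the loop below (filenames are made up, and each chunk would become one /api/appversion/clonefiles or /api/appversion/cloneblobs call):

    MAX_FILES_TO_CLONE = 100   # same constant as at the top of this file

    files = ['file%04d.py' % i for i in xrange(250)]
    for i in xrange(0, len(files), MAX_FILES_TO_CLONE):
      chunk = files[i:min(len(files), i + MAX_FILES_TO_CLONE)]
      print i, len(chunk)      # 0 100, then 100 100, then 200 50
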
  1061   def UploadFile(self, path, file_handle):
  1092   def UploadFile(self, path, file_handle):
  1069       file_handle: A file-like object containing the data to upload.
  1100       file_handle: A file-like object containing the data to upload.
  1070 
  1101 
  1071     Raises:
  1102     Raises:
  1072       KeyError: The provided file is not amongst those to be uploaded.
  1103       KeyError: The provided file is not amongst those to be uploaded.
  1073     """
  1104     """
  1074     assert self.in_transaction, "Begin() must be called before UploadFile()."
  1105     assert self.in_transaction, 'Begin() must be called before UploadFile().'
  1075     if path not in self.files:
  1106     if path not in self.files:
  1076       raise KeyError("File '%s' is not in the list of files to be uploaded."
  1107       raise KeyError('File \'%s\' is not in the list of files to be uploaded.'
  1077                      % path)
  1108                      % path)
  1078 
  1109 
  1079     del self.files[path]
  1110     del self.files[path]
  1080     mime_type = GetMimeTypeIfStaticFile(self.config, path)
  1111     mime_type = GetMimeTypeIfStaticFile(self.config, path)
  1081     if mime_type is not None:
  1112     if mime_type is not None:
  1082       self.server.Send("/api/appversion/addblob", app_id=self.app_id,
  1113       self.server.Send('/api/appversion/addblob', app_id=self.app_id,
  1083                        version=self.version, path=path, content_type=mime_type,
  1114                        version=self.version, path=path, content_type=mime_type,
  1084                        payload=file_handle.read())
  1115                        payload=file_handle.read())
  1085     else:
  1116     else:
  1086       self.server.Send("/api/appversion/addfile", app_id=self.app_id,
  1117       self.server.Send('/api/appversion/addfile', app_id=self.app_id,
  1087                        version=self.version, path=path,
  1118                        version=self.version, path=path,
  1088                        payload=file_handle.read())
  1119                        payload=file_handle.read())
  1089 
  1120 
  1090   def Commit(self):
  1121   def Commit(self):
  1091     """Commits the transaction, making the new app version available.
  1122     """Commits the transaction, making the new app version available.
  1096     This tries the new 'deploy' method; if that fails it uses the old 'commit'.
  1127     This tries the new 'deploy' method; if that fails it uses the old 'commit'.
  1097 
  1128 
  1098     Raises:
  1129     Raises:
  1099       Exception: Some required files were not uploaded.
  1130       Exception: Some required files were not uploaded.
  1100     """
  1131     """
  1101     assert self.in_transaction, "Begin() must be called before Commit()."
  1132     assert self.in_transaction, 'Begin() must be called before Commit().'
  1102     if self.files:
  1133     if self.files:
  1103       raise Exception("Not all required files have been uploaded.")
  1134       raise Exception('Not all required files have been uploaded.')
  1104 
  1135 
  1105     try:
  1136     try:
  1106       self.Deploy()
  1137       self.Deploy()
  1107       if not RetryWithBackoff(1, 2, 8, self.IsReady):
  1138       if not RetryWithBackoff(1, 2, 8, self.IsReady):
  1108         logging.warning("Version still not ready to serve, aborting.")
  1139         logging.warning('Version still not ready to serve, aborting.')
  1109         raise Exception("Version not ready.")
  1140         raise Exception('Version not ready.')
  1110       self.StartServing()
  1141       self.StartServing()
  1111     except urllib2.HTTPError, e:
  1142     except urllib2.HTTPError, e:
  1112       if e.code != 404:
  1143       if e.code != 404:
  1113         raise
  1144         raise
  1114       StatusUpdate("Closing update.")
  1145       StatusUpdate('Closing update.')
  1115       self.server.Send("/api/appversion/commit", app_id=self.app_id,
  1146       self.server.Send('/api/appversion/commit', app_id=self.app_id,
  1116                        version=self.version)
  1147                        version=self.version)
  1117       self.in_transaction = False
  1148       self.in_transaction = False
  1118 
  1149 
  1119   def Deploy(self):
  1150   def Deploy(self):
  1120     """Deploys the new app version but does not make it default.
  1151     """Deploys the new app version but does not make it default.
  1123     before Deploy() can be called.
  1154     before Deploy() can be called.
  1124 
  1155 
  1125     Raises:
  1156     Raises:
  1126       Exception: Some required files were not uploaded.
  1157       Exception: Some required files were not uploaded.
  1127     """
  1158     """
  1128     assert self.in_transaction, "Begin() must be called before Deploy()."
  1159     assert self.in_transaction, 'Begin() must be called before Deploy().'
  1129     if self.files:
  1160     if self.files:
  1130       raise Exception("Not all required files have been uploaded.")
  1161       raise Exception('Not all required files have been uploaded.')
  1131 
  1162 
  1132     StatusUpdate("Deploying new version.")
  1163     StatusUpdate('Deploying new version.')
  1133     self.server.Send("/api/appversion/deploy", app_id=self.app_id,
  1164     self.server.Send('/api/appversion/deploy', app_id=self.app_id,
  1134                      version=self.version)
  1165                      version=self.version)
  1135     self.deployed = True
  1166     self.deployed = True
  1136 
  1167 
  1137   def IsReady(self):
  1168   def IsReady(self):
  1138     """Check if the new app version is ready to serve traffic.
  1169     """Check if the new app version is ready to serve traffic.
  1141       Exception: Deploy has not yet been called.
  1172       Exception: Deploy has not yet been called.
  1142 
  1173 
  1143     Returns:
  1174     Returns:
  1144       True if the server returned the app is ready to serve.
  1175       True if the server returned the app is ready to serve.
  1145     """
  1176     """
  1146     assert self.deployed, "Deploy() must be called before IsReady()."
  1177     assert self.deployed, 'Deploy() must be called before IsReady().'
  1147 
  1178 
  1148     StatusUpdate("Checking if new version is ready to serve.")
  1179     StatusUpdate('Checking if new version is ready to serve.')
  1149     result = self.server.Send("/api/appversion/isready", app_id=self.app_id,
  1180     result = self.server.Send('/api/appversion/isready', app_id=self.app_id,
  1150                               version=self.version)
  1181                               version=self.version)
  1151     return result == "1"
  1182     return result == '1'
  1152 
  1183 
  1153   def StartServing(self):
  1184   def StartServing(self):
  1154     """Start serving with the newly created version.
  1185     """Start serving with the newly created version.
  1155 
  1186 
  1156     Raises:
  1187     Raises:
  1157       Exception: Deploy has not yet been called.
  1188       Exception: Deploy has not yet been called.
  1158     """
  1189     """
   1159     assert self.deployed, "Deploy() must be called before StartServing()."
   1190     assert self.deployed, 'Deploy() must be called before StartServing().'
  1160 
  1191 
  1161     StatusUpdate("Closing update: new version is ready to start serving.")
  1192     StatusUpdate('Closing update: new version is ready to start serving.')
  1162     self.server.Send("/api/appversion/startserving",
  1193     self.server.Send('/api/appversion/startserving',
  1163                      app_id=self.app_id, version=self.version)
  1194                      app_id=self.app_id, version=self.version)
  1164     self.in_transaction = False
  1195     self.in_transaction = False
  1165 
  1196 
  1166   def Rollback(self):
  1197   def Rollback(self):
  1167     """Rolls back the transaction if one is in progress."""
  1198     """Rolls back the transaction if one is in progress."""
  1168     if not self.in_transaction:
  1199     if not self.in_transaction:
  1169       return
  1200       return
  1170     StatusUpdate("Rolling back the update.")
  1201     StatusUpdate('Rolling back the update.')
  1171     self.server.Send("/api/appversion/rollback", app_id=self.app_id,
  1202     self.server.Send('/api/appversion/rollback', app_id=self.app_id,
  1172                      version=self.version)
  1203                      version=self.version)
  1173     self.in_transaction = False
  1204     self.in_transaction = False
  1174     self.files = {}
  1205     self.files = {}
  1175 
  1206 
  1176   def DoUpload(self, paths, max_size, openfunc):
  1207   def DoUpload(self, paths, max_size, openfunc):
  1179     Args:
  1210     Args:
  1180       paths: An iterator that yields the relative paths of the files to upload.
  1211       paths: An iterator that yields the relative paths of the files to upload.
  1181       max_size: The maximum size file to upload.
  1212       max_size: The maximum size file to upload.
  1182       openfunc: A function that takes a path and returns a file-like object.
  1213       openfunc: A function that takes a path and returns a file-like object.
  1183     """
  1214     """
  1184     logging.info("Reading app configuration.")
  1215     logging.info('Reading app configuration.')
  1185 
  1216 
  1186     path = ""
  1217     path = ''
  1187     try:
  1218     try:
  1188       StatusUpdate("Scanning files on local disk.")
  1219       StatusUpdate('Scanning files on local disk.')
  1189       num_files = 0
  1220       num_files = 0
  1190       for path in paths:
  1221       for path in paths:
  1191         file_handle = openfunc(path)
  1222         file_handle = openfunc(path)
  1192         try:
  1223         try:
  1193           if self.config.skip_files.match(path):
  1224           if self.config.skip_files.match(path):
  1194             logging.info("Ignoring file '%s': File matches ignore regex.",
  1225             logging.info('Ignoring file \'%s\': File matches ignore regex.',
  1195                          path)
  1226                          path)
  1196           else:
  1227           else:
  1197             file_length = GetFileLength(file_handle)
  1228             file_length = GetFileLength(file_handle)
  1198             if file_length > max_size:
  1229             if file_length > max_size:
  1199               logging.error("Ignoring file '%s': Too long "
  1230               logging.error('Ignoring file \'%s\': Too long '
  1200                             "(max %d bytes, file is %d bytes)",
  1231                             '(max %d bytes, file is %d bytes)',
  1201                             path, max_size, file_length)
  1232                             path, max_size, file_length)
  1202             else:
  1233             else:
  1203               logging.info("Processing file '%s'", path)
  1234               logging.info('Processing file \'%s\'', path)
  1204               self.AddFile(path, file_handle)
  1235               self.AddFile(path, file_handle)
  1205         finally:
  1236         finally:
  1206           file_handle.close()
  1237           file_handle.close()
  1207         num_files += 1
  1238         num_files += 1
  1208         if num_files % 500 == 0:
  1239         if num_files % 500 == 0:
  1209           StatusUpdate("Scanned %d files." % num_files)
  1240           StatusUpdate('Scanned %d files.' % num_files)
  1210     except KeyboardInterrupt:
  1241     except KeyboardInterrupt:
  1211       logging.info("User interrupted. Aborting.")
  1242       logging.info('User interrupted. Aborting.')
  1212       raise
  1243       raise
  1213     except EnvironmentError, e:
  1244     except EnvironmentError, e:
  1214       logging.error("An error occurred processing file '%s': %s. Aborting.",
  1245       logging.error('An error occurred processing file \'%s\': %s. Aborting.',
  1215                     path, e)
  1246                     path, e)
  1216       raise
  1247       raise
  1217 
  1248 
  1218     try:
  1249     try:
  1219       missing_files = self.Begin()
  1250       missing_files = self.Begin()
  1220       if missing_files:
  1251       if missing_files:
  1221         StatusUpdate("Uploading %d files." % len(missing_files))
  1252         StatusUpdate('Uploading %d files.' % len(missing_files))
  1222         num_files = 0
  1253         num_files = 0
  1223         for missing_file in missing_files:
  1254         for missing_file in missing_files:
  1224           logging.info("Uploading file '%s'" % missing_file)
  1255           logging.info('Uploading file \'%s\'' % missing_file)
  1225           file_handle = openfunc(missing_file)
  1256           file_handle = openfunc(missing_file)
  1226           try:
  1257           try:
  1227             self.UploadFile(missing_file, file_handle)
  1258             self.UploadFile(missing_file, file_handle)
  1228           finally:
  1259           finally:
  1229             file_handle.close()
  1260             file_handle.close()
  1230           num_files += 1
  1261           num_files += 1
  1231           if num_files % 500 == 0:
  1262           if num_files % 500 == 0:
  1232             StatusUpdate("Uploaded %d files." % num_files)
  1263             StatusUpdate('Uploaded %d files.' % num_files)
  1233 
  1264 
  1234       self.Commit()
  1265       self.Commit()
  1235 
  1266 
  1236     except KeyboardInterrupt:
  1267     except KeyboardInterrupt:
  1237       logging.info("User interrupted. Aborting.")
  1268       logging.info('User interrupted. Aborting.')
  1238       self.Rollback()
  1269       self.Rollback()
  1239       raise
  1270       raise
  1240     except:
  1271     except:
  1241       logging.exception("An unexpected error occurred. Aborting.")
  1272       logging.exception('An unexpected error occurred. Aborting.')
  1242       self.Rollback()
  1273       self.Rollback()
  1243       raise
  1274       raise
  1244 
  1275 
  1245     logging.info("Done!")
  1276     logging.info('Done!')
  1246 
  1277 
  1247 
  1278 
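The methods above form a small transaction protocol: Begin() opens the update and reports which files the server still needs, UploadFile() sends each of them, Commit() tries the newer deploy/isready/startserving path and falls back to the old commit call, and Rollback() abandons the update. A minimal, hedged sketch of the loop DoUpload() drives once the files have been registered with AddFile(); the appversion and openfunc arguments stand in for an already-constructed AppVersionUpload-like object and its file opener:

def upload_missing_files(appversion, openfunc):
  # Sketch of the second half of DoUpload(): send whatever Begin() reports
  # as missing, then commit; roll back on any failure and re-raise.
  try:
    for path in appversion.Begin():
      file_handle = openfunc(path)
      try:
        appversion.UploadFile(path, file_handle)
      finally:
        file_handle.close()
    appversion.Commit()
  except Exception:
    appversion.Rollback()
    raise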
  1248 def FileIterator(base, separator=os.path.sep):
  1279 def FileIterator(base, separator=os.path.sep):
  1249   """Walks a directory tree, returning all the files. Follows symlinks.
  1280   """Walks a directory tree, returning all the files. Follows symlinks.
  1250 
  1281 
  1253     separator: Path separator used by the running system's platform.
  1284     separator: Path separator used by the running system's platform.
  1254 
  1285 
  1255   Yields:
  1286   Yields:
  1256     Paths of files found, relative to base.
  1287     Paths of files found, relative to base.
  1257   """
  1288   """
  1258   dirs = [""]
  1289   dirs = ['']
  1259   while dirs:
  1290   while dirs:
  1260     current_dir = dirs.pop()
  1291     current_dir = dirs.pop()
  1261     for entry in os.listdir(os.path.join(base, current_dir)):
  1292     for entry in os.listdir(os.path.join(base, current_dir)):
  1262       name = os.path.join(current_dir, entry)
  1293       name = os.path.join(current_dir, entry)
  1263       fullname = os.path.join(base, name)
  1294       fullname = os.path.join(base, name)
  1264       if os.path.isfile(fullname):
  1295       if os.path.isfile(fullname):
  1265         if separator == "\\":
  1296         if separator == '\\':
  1266           name = name.replace("\\", "/")
  1297           name = name.replace('\\', '/')
  1267         yield name
  1298         yield name
  1268       elif os.path.isdir(fullname):
  1299       elif os.path.isdir(fullname):
  1269         dirs.append(name)
  1300         dirs.append(name)
  1270 
  1301 
  1271 
  1302 
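FileIterator() is what Update() later feeds into DoUpload(): it yields paths relative to base, normalized to forward slashes on Windows, and the caller supplies an opener that maps them back to real files. A hedged, self-contained usage sketch ('myapp' is only an example directory name):

import os
from google.appengine.tools.appcfg import FileIterator

basepath = 'myapp'  # hypothetical application directory
for relpath in FileIterator(basepath):
  # Resolve the relative path the same way Update() does for its openfunc.
  file_handle = open(os.path.join(basepath, relpath), 'rb')
  try:
    print '%s (%d bytes)' % (relpath, len(file_handle.read()))
  finally:
    file_handle.close()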
  1300     get_platform: Used for testing.
  1331     get_platform: Used for testing.
  1301 
  1332 
  1302   Returns:
  1333   Returns:
  1303     String containing the 'user-agent' header value, which includes the SDK
  1334     String containing the 'user-agent' header value, which includes the SDK
  1304     version, the platform information, and the version of Python;
  1335     version, the platform information, and the version of Python;
  1305     e.g., "appcfg_py/1.0.1 Darwin/9.2.0 Python/2.5.2".
  1336     e.g., 'appcfg_py/1.0.1 Darwin/9.2.0 Python/2.5.2'.
  1306   """
  1337   """
  1307   product_tokens = []
  1338   product_tokens = []
  1308 
  1339 
  1309   sdk_name = os.environ.get("APPCFG_SDK_NAME")
  1340   sdk_name = os.environ.get('APPCFG_SDK_NAME')
  1310   if sdk_name:
  1341   if sdk_name:
  1311     product_tokens.append(sdk_name)
  1342     product_tokens.append(sdk_name)
  1312   else:
  1343   else:
  1313     version = get_version()
  1344     version = get_version()
  1314     if version is None:
  1345     if version is None:
  1315       release = "unknown"
  1346       release = 'unknown'
  1316     else:
  1347     else:
  1317       release = version["release"]
  1348       release = version['release']
  1318 
  1349 
  1319     product_tokens.append("appcfg_py/%s" % release)
  1350     product_tokens.append('appcfg_py/%s' % release)
  1320 
  1351 
  1321   product_tokens.append(get_platform())
  1352   product_tokens.append(get_platform())
  1322 
  1353 
  1323   python_version = ".".join(str(i) for i in sys.version_info)
  1354   python_version = '.'.join(str(i) for i in sys.version_info)
  1324   product_tokens.append("Python/%s" % python_version)
  1355   product_tokens.append('Python/%s' % python_version)
  1325 
  1356 
  1326   return " ".join(product_tokens)
  1357   return ' '.join(product_tokens)
  1327 
  1358 
  1328 
  1359 
  1329 def GetSourceName(get_version=GetVersionObject):
  1360 def GetSourceName(get_version=GetVersionObject):
  1330   """Gets the name of this source version."""
  1361   """Gets the name of this source version."""
  1331   version = get_version()
  1362   version = get_version()
  1332   if version is None:
  1363   if version is None:
  1333     release = "unknown"
  1364     release = 'unknown'
  1334   else:
  1365   else:
  1335     release = version["release"]
  1366     release = version['release']
  1336   return "Google-appcfg-%s" % (release,)
  1367   return 'Google-appcfg-%s' % (release,)
  1337 
  1368 
  1338 
  1369 
  1339 class AppCfgApp(object):
  1370 class AppCfgApp(object):
  1340   """Singleton class to wrap AppCfg tool functionality.
  1371   """Singleton class to wrap AppCfg tool functionality.
  1341 
  1372 
  1395     self.options, self.args = self.parser.parse_args(argv[1:])
  1426     self.options, self.args = self.parser.parse_args(argv[1:])
  1396 
  1427 
  1397     if len(self.args) < 1:
  1428     if len(self.args) < 1:
  1398       self._PrintHelpAndExit()
  1429       self._PrintHelpAndExit()
  1399     if self.args[0] not in self.actions:
  1430     if self.args[0] not in self.actions:
  1400       self.parser.error("Unknown action '%s'\n%s" %
  1431       self.parser.error('Unknown action \'%s\'\n%s' %
  1401                         (self.args[0], self.parser.get_description()))
  1432                         (self.args[0], self.parser.get_description()))
  1402     action_name = self.args.pop(0)
  1433     action_name = self.args.pop(0)
  1403     self.action = self.actions[action_name]
  1434     self.action = self.actions[action_name]
  1404 
  1435 
  1405     self.parser, self.options = self._MakeSpecificParser(self.action)
  1436     self.parser, self.options = self._MakeSpecificParser(self.action)
  1417 
  1448 
  1418   def Run(self):
  1449   def Run(self):
  1419     """Executes the requested action.
  1450     """Executes the requested action.
  1420 
  1451 
  1421     Catches any HTTPErrors raised by the action and prints them to stderr.
  1452     Catches any HTTPErrors raised by the action and prints them to stderr.
       
  1453 
       
  1454     Returns:
       
  1455       1 on error, 0 if successful.
  1422     """
  1456     """
  1423     try:
  1457     try:
  1424       self.action(self)
  1458       self.action(self)
  1425     except urllib2.HTTPError, e:
  1459     except urllib2.HTTPError, e:
  1426       body = e.read()
  1460       body = e.read()
  1427       print >>self.error_fh, ("Error %d: --- begin server output ---\n"
  1461       print >>self.error_fh, ('Error %d: --- begin server output ---\n'
  1428                               "%s\n--- end server output ---" %
  1462                               '%s\n--- end server output ---' %
  1429                               (e.code, body.rstrip("\n")))
  1463                               (e.code, body.rstrip('\n')))
  1430       return 1
  1464       return 1
  1431     except yaml_errors.EventListenerError, e:
  1465     except yaml_errors.EventListenerError, e:
  1432       print >>self.error_fh, ("Error parsing yaml file:\n%s" % e)
  1466       print >>self.error_fh, ('Error parsing yaml file:\n%s' % e)
  1433       return 1
  1467       return 1
  1434     return 0
  1468     return 0
  1435 
  1469 
  1436   def _GetActionDescriptions(self):
  1470   def _GetActionDescriptions(self):
  1437     """Returns a formatted string containing the short_descs for all actions."""
  1471     """Returns a formatted string containing the short_descs for all actions."""
  1438     action_names = self.actions.keys()
  1472     action_names = self.actions.keys()
  1439     action_names.sort()
  1473     action_names.sort()
  1440     desc = ""
  1474     desc = ''
  1441     for action_name in action_names:
  1475     for action_name in action_names:
  1442       desc += "  %s: %s\n" % (action_name, self.actions[action_name].short_desc)
  1476       desc += '  %s: %s\n' % (action_name, self.actions[action_name].short_desc)
  1443     return desc
  1477     return desc
  1444 
  1478 
  1445   def _GetOptionParser(self):
  1479   def _GetOptionParser(self):
  1446     """Creates an OptionParser with generic usage and description strings.
  1480     """Creates an OptionParser with generic usage and description strings.
  1447 
  1481 
  1452     class Formatter(optparse.IndentedHelpFormatter):
  1486     class Formatter(optparse.IndentedHelpFormatter):
  1453       """Custom help formatter that does not reformat the description."""
  1487       """Custom help formatter that does not reformat the description."""
  1454 
  1488 
  1455       def format_description(self, description):
  1489       def format_description(self, description):
  1456         """Very simple formatter."""
  1490         """Very simple formatter."""
  1457         return description + "\n"
  1491         return description + '\n'
  1458 
  1492 
  1459     desc = self._GetActionDescriptions()
  1493     desc = self._GetActionDescriptions()
  1460     desc = ("Action must be one of:\n%s"
  1494     desc = ('Action must be one of:\n%s'
  1461             "Use 'help <action>' for a detailed description.") % desc
  1495             'Use \'help <action>\' for a detailed description.') % desc
  1462 
  1496 
  1463     parser = self.parser_class(usage="%prog [options] <action>",
  1497     parser = self.parser_class(usage='%prog [options] <action>',
  1464                                description=desc,
  1498                                description=desc,
  1465                                formatter=Formatter(),
  1499                                formatter=Formatter(),
  1466                                conflict_handler="resolve")
  1500                                conflict_handler='resolve')
  1467     parser.add_option("-h", "--help", action="store_true",
  1501     parser.add_option('-h', '--help', action='store_true',
  1468                       dest="help", help="Show the help message and exit.")
  1502                       dest='help', help='Show the help message and exit.')
  1469     parser.add_option("-q", "--quiet", action="store_const", const=0,
  1503     parser.add_option('-q', '--quiet', action='store_const', const=0,
  1470                       dest="verbose", help="Print errors only.")
  1504                       dest='verbose', help='Print errors only.')
  1471     parser.add_option("-v", "--verbose", action="store_const", const=2,
  1505     parser.add_option('-v', '--verbose', action='store_const', const=2,
  1472                       dest="verbose", default=1,
  1506                       dest='verbose', default=1,
  1473                       help="Print info level logs.")
  1507                       help='Print info level logs.')
  1474     parser.add_option("--noisy", action="store_const", const=3,
  1508     parser.add_option('--noisy', action='store_const', const=3,
  1475                       dest="verbose", help="Print all logs.")
  1509                       dest='verbose', help='Print all logs.')
  1476     parser.add_option("-s", "--server", action="store", dest="server",
  1510     parser.add_option('-s', '--server', action='store', dest='server',
  1477                       default="appengine.google.com",
  1511                       default='appengine.google.com',
  1478                       metavar="SERVER", help="The server to connect to.")
  1512                       metavar='SERVER', help='The server to connect to.')
  1479     parser.add_option("--secure", action="store_true", dest="secure",
  1513     parser.add_option('--secure', action='store_true', dest='secure',
  1480                       default=False,
  1514                       default=False,
  1481                       help="Use SSL when communicating with the server.")
  1515                       help='Use SSL when communicating with the server.')
  1482     parser.add_option("-e", "--email", action="store", dest="email",
  1516     parser.add_option('-e', '--email', action='store', dest='email',
  1483                       metavar="EMAIL", default=None,
  1517                       metavar='EMAIL', default=None,
  1484                       help="The username to use. Will prompt if omitted.")
  1518                       help='The username to use. Will prompt if omitted.')
  1485     parser.add_option("-H", "--host", action="store", dest="host",
  1519     parser.add_option('-H', '--host', action='store', dest='host',
  1486                       metavar="HOST", default=None,
  1520                       metavar='HOST', default=None,
  1487                       help="Overrides the Host header sent with all RPCs.")
  1521                       help='Overrides the Host header sent with all RPCs.')
  1488     parser.add_option("--no_cookies", action="store_false",
  1522     parser.add_option('--no_cookies', action='store_false',
  1489                       dest="save_cookies", default=True,
  1523                       dest='save_cookies', default=True,
  1490                       help="Do not save authentication cookies to local disk.")
  1524                       help='Do not save authentication cookies to local disk.')
  1491     parser.add_option("--passin", action="store_true",
  1525     parser.add_option('--passin', action='store_true',
  1492                       dest="passin", default=False,
  1526                       dest='passin', default=False,
  1493                       help="Read the login password from stdin.")
  1527                       help='Read the login password from stdin.')
  1494     return parser
  1528     return parser
  1495 
  1529 
  1496   def _MakeSpecificParser(self, action):
  1530   def _MakeSpecificParser(self, action):
  1497     """Creates a new parser with documentation specific to 'action'.
  1531     """Creates a new parser with documentation specific to 'action'.
  1498 
  1532 
  1504       parser: An instance of OptionsParser customized to 'action'.
  1538       parser: An instance of OptionsParser customized to 'action'.
  1505       options: The command line options after re-parsing.
  1539       options: The command line options after re-parsing.
  1506     """
  1540     """
  1507     parser = self._GetOptionParser()
  1541     parser = self._GetOptionParser()
  1508     parser.set_usage(action.usage)
  1542     parser.set_usage(action.usage)
  1509     parser.set_description("%s\n%s" % (action.short_desc, action.long_desc))
  1543     parser.set_description('%s\n%s' % (action.short_desc, action.long_desc))
  1510     action.options(self, parser)
  1544     action.options(self, parser)
  1511     options, unused_args = parser.parse_args(self.argv[1:])
  1545     options, unused_args = parser.parse_args(self.argv[1:])
  1512     return parser, options
  1546     return parser, options
  1513 
  1547 
  1514   def _PrintHelpAndExit(self, exit_code=2):
  1548   def _PrintHelpAndExit(self, exit_code=2):
  1529 
  1563 
  1530     def GetUserCredentials():
  1564     def GetUserCredentials():
  1531       """Prompts the user for a username and password."""
  1565       """Prompts the user for a username and password."""
  1532       email = self.options.email
  1566       email = self.options.email
  1533       if email is None:
  1567       if email is None:
  1534         email = self.raw_input_fn("Email: ")
  1568         email = self.raw_input_fn('Email: ')
  1535 
  1569 
  1536       password_prompt = "Password for %s: " % email
  1570       password_prompt = 'Password for %s: ' % email
  1537       if self.options.passin:
  1571       if self.options.passin:
  1538         password = self.raw_input_fn(password_prompt)
  1572         password = self.raw_input_fn(password_prompt)
  1539       else:
  1573       else:
  1540         password = self.password_input_fn(password_prompt)
  1574         password = self.password_input_fn(password_prompt)
  1541 
  1575 
  1542       return (email, password)
  1576       return (email, password)
  1543 
  1577 
  1544     if self.options.host and self.options.host == "localhost":
  1578     if self.options.host and self.options.host == 'localhost':
  1545       email = self.options.email
  1579       email = self.options.email
  1546       if email is None:
  1580       if email is None:
  1547         email = "test@example.com"
  1581         email = 'test@example.com'
  1548         logging.info("Using debug user %s.  Override with --email" % email)
  1582         logging.info('Using debug user %s.  Override with --email' % email)
  1549       server = self.rpc_server_class(
  1583       server = self.rpc_server_class(
  1550           self.options.server,
  1584           self.options.server,
  1551           lambda: (email, "password"),
  1585           lambda: (email, 'password'),
  1552           GetUserAgent(),
  1586           GetUserAgent(),
  1553           GetSourceName(),
  1587           GetSourceName(),
  1554           host_override=self.options.host,
  1588           host_override=self.options.host,
  1555           save_cookies=self.options.save_cookies)
  1589           save_cookies=self.options.save_cookies)
  1556       server.authenticated = True
  1590       server.authenticated = True
  1564     return self.rpc_server_class(self.options.server, GetUserCredentials,
  1598     return self.rpc_server_class(self.options.server, GetUserCredentials,
  1565                                  GetUserAgent(), GetSourceName(),
  1599                                  GetUserAgent(), GetSourceName(),
  1566                                  host_override=self.options.host,
  1600                                  host_override=self.options.host,
  1567                                  save_cookies=self.options.save_cookies,
  1601                                  save_cookies=self.options.save_cookies,
  1568                                  auth_tries=auth_tries,
  1602                                  auth_tries=auth_tries,
  1569                                  account_type="HOSTED_OR_GOOGLE",
  1603                                  account_type='HOSTED_OR_GOOGLE',
  1570                                  secure=self.options.secure)
  1604                                  secure=self.options.secure)
  1571 
  1605 
  1572   def _FindYaml(self, basepath, file_name):
  1606   def _FindYaml(self, basepath, file_name):
  1573     """Find yaml files in application directory.
  1607     """Find yaml files in application directory.
  1574 
  1608 
  1578 
  1612 
  1579     Returns:
  1613     Returns:
  1580       Path to located yaml file if one exists, else None.
  1614       Path to located yaml file if one exists, else None.
  1581     """
  1615     """
  1582     if not os.path.isdir(basepath):
  1616     if not os.path.isdir(basepath):
  1583       self.parser.error("Not a directory: %s" % basepath)
  1617       self.parser.error('Not a directory: %s' % basepath)
  1584 
  1618 
  1585     for yaml_file in (file_name + ".yaml", file_name + ".yml"):
  1619     for yaml_file in (file_name + '.yaml', file_name + '.yml'):
  1586       yaml_path = os.path.join(basepath, yaml_file)
  1620       yaml_path = os.path.join(basepath, yaml_file)
  1587       if os.path.isfile(yaml_path):
  1621       if os.path.isfile(yaml_path):
  1588         return yaml_path
  1622         return yaml_path
  1589 
  1623 
  1590     return None
  1624     return None
  1596       basepath: the directory of the application.
  1630       basepath: the directory of the application.
  1597 
  1631 
  1598     Returns:
  1632     Returns:
  1599       An AppInfoExternal object.
  1633       An AppInfoExternal object.
  1600     """
  1634     """
  1601     appyaml_filename = self._FindYaml(basepath, "app")
  1635     appyaml_filename = self._FindYaml(basepath, 'app')
  1602     if appyaml_filename is None:
  1636     if appyaml_filename is None:
  1603       self.parser.error("Directory does not contain an app.yaml "
  1637       self.parser.error('Directory does not contain an app.yaml '
  1604                         "configuration file.")
  1638                         'configuration file.')
  1605 
  1639 
  1606     fh = open(appyaml_filename, "r")
  1640     fh = open(appyaml_filename, 'r')
  1607     try:
  1641     try:
  1608       appyaml = appinfo.LoadSingleAppInfo(fh)
  1642       appyaml = appinfo.LoadSingleAppInfo(fh)
  1609     finally:
  1643     finally:
  1610       fh.close()
  1644       fh.close()
  1611     return appyaml
  1645     return appyaml
  1612 
  1646 
       
  1647   def _ParseYamlFile(self, basepath, basename, parser):
       
  1648     """Parses the a yaml file.
       
  1649 
       
  1650     Args:
       
  1651       basepath: the directory of the application.
       
  1652       basename: the base name of the file (with the '.yaml' stripped off).
       
  1653       parser: the function or method used to parse the file.
       
  1654 
       
  1655     Returns:
       
  1656       A single parsed yaml file or None if the file does not exist.
       
  1657     """
       
  1658     file_name = self._FindYaml(basepath, basename)
       
  1659     if file_name is not None:
       
  1660       fh = open(file_name, 'r')
       
  1661       try:
       
  1662         defns = parser(fh)
       
  1663       finally:
       
  1664         fh.close()
       
  1665       return defns
       
  1666     return None
       
  1667 
  1613   def _ParseIndexYaml(self, basepath):
  1668   def _ParseIndexYaml(self, basepath):
  1614     """Parses the index.yaml file.
  1669     """Parses the index.yaml file.
  1615 
  1670 
  1616     Args:
  1671     Args:
  1617       basepath: the directory of the application.
  1672       basepath: the directory of the application.
  1618 
  1673 
  1619     Returns:
  1674     Returns:
  1620       A single parsed yaml file or None if the file does not exist.
  1675       A single parsed yaml file or None if the file does not exist.
  1621     """
  1676     """
  1622     file_name = self._FindYaml(basepath, "index")
  1677     return self._ParseYamlFile(basepath, 'index',
  1623     if file_name is not None:
  1678                                datastore_index.ParseIndexDefinitions)
  1624       fh = open(file_name, "r")
       
  1625       try:
       
  1626         index_defs = datastore_index.ParseIndexDefinitions(fh)
       
  1627       finally:
       
  1628         fh.close()
       
  1629       return index_defs
       
  1630     return None
       
  1631 
  1679 
  1632   def _ParseCronYaml(self, basepath):
  1680   def _ParseCronYaml(self, basepath):
  1633     """Parses the cron.yaml file.
  1681     """Parses the cron.yaml file.
  1634 
  1682 
  1635     Args:
  1683     Args:
  1636       basepath: the directory of the application.
  1684       basepath: the directory of the application.
  1637 
  1685 
  1638     Returns:
  1686     Returns:
  1639       A CronInfoExternal object.
  1687       A CronInfoExternal object or None if the file does not exist.
  1640     """
  1688     """
  1641     file_name = self._FindYaml(basepath, "cron")
  1689     return self._ParseYamlFile(basepath, 'cron', croninfo.LoadSingleCron)
  1642     if file_name is not None:
  1690 
  1643       fh = open(file_name, "r")
  1691   def _ParseQueueYaml(self, basepath):
  1644       try:
  1692     """Parses the queue.yaml file.
  1645         cron_info = croninfo.LoadSingleCron(fh)
  1693 
  1646       finally:
  1694     Args:
  1647         fh.close()
  1695       basepath: the directory of the application.
  1648       return cron_info
  1696 
  1649     return None
  1697     Returns:
       
   1698       A QueueInfoExternal object or None if the file does not exist.
       
  1699     """
       
  1700     return self._ParseYamlFile(basepath, 'queue', queueinfo.LoadSingleQueue)
  1650 
  1701 
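_ParseIndexYaml(), _ParseCronYaml() and the new _ParseQueueYaml() are now all one-liners over the shared _ParseYamlFile() helper: find '<name>.yaml' or '<name>.yml' under basepath, run it through the supplied parser, and return None when the file is absent. A hedged sketch of how a further configuration type could be added in the same style (dosinfo and LoadSingleDos are illustrative names, not part of this changeset):

  def _ParseDosYaml(self, basepath):
    """Parses the dos.yaml file.

    Args:
      basepath: the directory of the application.

    Returns:
      A parsed dos configuration object or None if the file does not exist.
    """
    return self._ParseYamlFile(basepath, 'dos', dosinfo.LoadSingleDos)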
  1651   def Help(self):
  1702   def Help(self):
  1652     """Prints help for a specific action.
  1703     """Prints help for a specific action.
  1653 
  1704 
  1654     Expects self.args[0] to contain the name of the action in question.
  1705     Expects self.args[0] to contain the name of the action in question.
  1655     Exits the program after printing the help message.
  1706     Exits the program after printing the help message.
  1656     """
  1707     """
  1657     if len(self.args) != 1 or self.args[0] not in self.actions:
  1708     if len(self.args) != 1 or self.args[0] not in self.actions:
  1658       self.parser.error("Expected a single action argument. Must be one of:\n" +
  1709       self.parser.error('Expected a single action argument. Must be one of:\n' +
  1659                         self._GetActionDescriptions())
  1710                         self._GetActionDescriptions())
  1660 
  1711 
  1661     action = self.actions[self.args[0]]
  1712     action = self.actions[self.args[0]]
  1662     self.parser, unused_options = self._MakeSpecificParser(action)
  1713     self.parser, unused_options = self._MakeSpecificParser(action)
  1663     self._PrintHelpAndExit(exit_code=0)
  1714     self._PrintHelpAndExit(exit_code=0)
  1664 
  1715 
  1665   def Update(self):
  1716   def Update(self):
  1666     """Updates and deploys a new appversion."""
  1717     """Updates and deploys a new appversion."""
  1667     if len(self.args) != 1:
  1718     if len(self.args) != 1:
  1668       self.parser.error("Expected a single <directory> argument.")
  1719       self.parser.error('Expected a single <directory> argument.')
  1669 
  1720 
  1670     basepath = self.args[0]
  1721     basepath = self.args[0]
  1671     appyaml = self._ParseAppYaml(basepath)
  1722     appyaml = self._ParseAppYaml(basepath)
  1672     rpc_server = self._GetRpcServer()
  1723     rpc_server = self._GetRpcServer()
  1673 
  1724 
  1674     updatecheck = self.update_check_class(rpc_server, appyaml)
  1725     updatecheck = self.update_check_class(rpc_server, appyaml)
  1675     updatecheck.CheckForUpdates()
  1726     updatecheck.CheckForUpdates()
  1676 
  1727 
  1677     appversion = AppVersionUpload(rpc_server, appyaml)
  1728     appversion = AppVersionUpload(rpc_server, appyaml)
  1678     appversion.DoUpload(FileIterator(basepath), self.options.max_size,
  1729     appversion.DoUpload(FileIterator(basepath), self.options.max_size,
  1679                         lambda path: open(os.path.join(basepath, path), "rb"))
  1730                         lambda path: open(os.path.join(basepath, path), 'rb'))
  1680 
  1731 
  1681     index_defs = self._ParseIndexYaml(basepath)
  1732     index_defs = self._ParseIndexYaml(basepath)
  1682     if index_defs:
  1733     if index_defs:
  1683       index_upload = IndexDefinitionUpload(rpc_server, appyaml, index_defs)
  1734       index_upload = IndexDefinitionUpload(rpc_server, appyaml, index_defs)
  1684       try:
  1735       try:
  1685         index_upload.DoUpload()
  1736         index_upload.DoUpload()
  1686       except urllib2.HTTPError, e:
  1737       except urllib2.HTTPError, e:
  1687         StatusUpdate("Error %d: --- begin server output ---\n"
  1738         StatusUpdate('Error %d: --- begin server output ---\n'
  1688                      "%s\n--- end server output ---" %
  1739                      '%s\n--- end server output ---' %
  1689                      (e.code, e.read().rstrip("\n")))
  1740                      (e.code, e.read().rstrip('\n')))
  1690         print >> self.error_fh, (
  1741         print >> self.error_fh, (
  1691             "Your app was updated, but there was an error updating your "
  1742             'Your app was updated, but there was an error updating your '
  1692             "indexes. Please retry later with appcfg.py update_indexes.")
  1743             'indexes. Please retry later with appcfg.py update_indexes.')
  1693 
  1744 
  1694     cron_entries = self._ParseCronYaml(basepath)
  1745     cron_entries = self._ParseCronYaml(basepath)
  1695     if cron_entries:
  1746     if cron_entries:
  1696       cron_upload = CronEntryUpload(rpc_server, appyaml, cron_entries)
  1747       cron_upload = CronEntryUpload(rpc_server, appyaml, cron_entries)
  1697       cron_upload.DoUpload()
  1748       cron_upload.DoUpload()
  1698 
  1749 
       
  1750     queue_entries = self._ParseQueueYaml(basepath)
       
  1751     if queue_entries:
       
  1752       queue_upload = QueueEntryUpload(rpc_server, appyaml, queue_entries)
       
  1753       queue_upload.DoUpload()
       
  1754 
  1699   def _UpdateOptions(self, parser):
  1755   def _UpdateOptions(self, parser):
  1700     """Adds update-specific options to 'parser'.
  1756     """Adds update-specific options to 'parser'.
  1701 
  1757 
  1702     Args:
  1758     Args:
  1703       parser: An instance of OptionsParser.
  1759       parser: An instance of OptionsParser.
  1704     """
  1760     """
  1705     parser.add_option("-S", "--max_size", type="int", dest="max_size",
  1761     parser.add_option('-S', '--max_size', type='int', dest='max_size',
  1706                       default=10485760, metavar="SIZE",
  1762                       default=10485760, metavar='SIZE',
  1707                       help="Maximum size of a file to upload.")
  1763                       help='Maximum size of a file to upload.')
  1708 
  1764 
  1709   def VacuumIndexes(self):
  1765   def VacuumIndexes(self):
  1710     """Deletes unused indexes."""
  1766     """Deletes unused indexes."""
  1711     if len(self.args) != 1:
  1767     if len(self.args) != 1:
  1712       self.parser.error("Expected a single <directory> argument.")
  1768       self.parser.error('Expected a single <directory> argument.')
  1713 
  1769 
  1714     basepath = self.args[0]
  1770     basepath = self.args[0]
  1715     config = self._ParseAppYaml(basepath)
  1771     config = self._ParseAppYaml(basepath)
  1716 
  1772 
  1717     index_defs = self._ParseIndexYaml(basepath)
  1773     index_defs = self._ParseIndexYaml(basepath)
  1728     """Adds vacuum_indexes-specific options to 'parser'.
  1784     """Adds vacuum_indexes-specific options to 'parser'.
  1729 
  1785 
  1730     Args:
  1786     Args:
  1731       parser: An instance of OptionsParser.
  1787       parser: An instance of OptionsParser.
  1732     """
  1788     """
  1733     parser.add_option("-f", "--force", action="store_true", dest="force_delete",
  1789     parser.add_option('-f', '--force', action='store_true', dest='force_delete',
  1734                       default=False,
  1790                       default=False,
  1735                       help="Force deletion without being prompted.")
  1791                       help='Force deletion without being prompted.')
  1736 
  1792 
  1737   def UpdateCron(self):
  1793   def UpdateCron(self):
  1738     """Updates any new or changed cron definitions."""
  1794     """Updates any new or changed cron definitions."""
  1739     if len(self.args) != 1:
  1795     if len(self.args) != 1:
  1740       self.parser.error("Expected a single <directory> argument.")
  1796       self.parser.error('Expected a single <directory> argument.')
  1741 
  1797 
  1742     basepath = self.args[0]
  1798     basepath = self.args[0]
  1743     appyaml = self._ParseAppYaml(basepath)
  1799     appyaml = self._ParseAppYaml(basepath)
  1744     rpc_server = self._GetRpcServer()
  1800     rpc_server = self._GetRpcServer()
  1745 
  1801 
  1749       cron_upload.DoUpload()
  1805       cron_upload.DoUpload()
  1750 
  1806 
  1751   def UpdateIndexes(self):
  1807   def UpdateIndexes(self):
  1752     """Updates indexes."""
  1808     """Updates indexes."""
  1753     if len(self.args) != 1:
  1809     if len(self.args) != 1:
  1754       self.parser.error("Expected a single <directory> argument.")
  1810       self.parser.error('Expected a single <directory> argument.')
  1755 
  1811 
  1756     basepath = self.args[0]
  1812     basepath = self.args[0]
  1757     appyaml = self._ParseAppYaml(basepath)
  1813     appyaml = self._ParseAppYaml(basepath)
  1758     rpc_server = self._GetRpcServer()
  1814     rpc_server = self._GetRpcServer()
  1759 
  1815 
  1760     index_defs = self._ParseIndexYaml(basepath)
  1816     index_defs = self._ParseIndexYaml(basepath)
  1761     if index_defs:
  1817     if index_defs:
  1762       index_upload = IndexDefinitionUpload(rpc_server, appyaml, index_defs)
  1818       index_upload = IndexDefinitionUpload(rpc_server, appyaml, index_defs)
  1763       index_upload.DoUpload()
  1819       index_upload.DoUpload()
  1764 
  1820 
       
  1821   def UpdateQueues(self):
       
  1822     """Updates any new or changed task queue definitions."""
       
  1823     if len(self.args) != 1:
       
  1824       self.parser.error('Expected a single <directory> argument.')
       
  1825 
       
  1826     basepath = self.args[0]
       
  1827     appyaml = self._ParseAppYaml(basepath)
       
  1828     rpc_server = self._GetRpcServer()
       
  1829 
       
  1830     queue_entries = self._ParseQueueYaml(basepath)
       
  1831     if queue_entries:
       
  1832       queue_upload = QueueEntryUpload(rpc_server, appyaml, queue_entries)
       
  1833       queue_upload.DoUpload()
       
  1834 
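UpdateQueues() mirrors UpdateCron() and UpdateIndexes() for the new queue.yaml support. Assuming the action is wired into AppCfgApp's action table (that registration is outside the hunks shown here), it can be driven programmatically like any other action; a hedged sketch:

import sys
from google.appengine.tools import appcfg

# 'myapp/' is a hypothetical application directory containing queue.yaml.
sys.exit(appcfg.AppCfgApp(['appcfg.py', 'update_queues', 'myapp/']).Run())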
  1765   def Rollback(self):
  1835   def Rollback(self):
  1766     """Does a rollback of any existing transaction for this app version."""
  1836     """Does a rollback of any existing transaction for this app version."""
  1767     if len(self.args) != 1:
  1837     if len(self.args) != 1:
  1768       self.parser.error("Expected a single <directory> argument.")
  1838       self.parser.error('Expected a single <directory> argument.')
  1769 
  1839 
  1770     basepath = self.args[0]
  1840     basepath = self.args[0]
  1771     appyaml = self._ParseAppYaml(basepath)
  1841     appyaml = self._ParseAppYaml(basepath)
  1772 
  1842 
  1773     appversion = AppVersionUpload(self._GetRpcServer(), appyaml)
  1843     appversion = AppVersionUpload(self._GetRpcServer(), appyaml)
  1776 
  1846 
  1777   def RequestLogs(self):
  1847   def RequestLogs(self):
  1778     """Write request logs to a file."""
  1848     """Write request logs to a file."""
  1779     if len(self.args) != 2:
  1849     if len(self.args) != 2:
  1780       self.parser.error(
  1850       self.parser.error(
  1781           "Expected a <directory> argument and an <output_file> argument.")
  1851           'Expected a <directory> argument and an <output_file> argument.')
  1782     if (self.options.severity is not None and
  1852     if (self.options.severity is not None and
  1783         not 0 <= self.options.severity <= MAX_LOG_LEVEL):
  1853         not 0 <= self.options.severity <= MAX_LOG_LEVEL):
  1784       self.parser.error(
  1854       self.parser.error(
  1785           "Severity range is 0 (DEBUG) through %s (CRITICAL)." % MAX_LOG_LEVEL)
  1855           'Severity range is 0 (DEBUG) through %s (CRITICAL).' % MAX_LOG_LEVEL)
  1786 
  1856 
  1787     if self.options.num_days is None:
  1857     if self.options.num_days is None:
  1788       self.options.num_days = int(not self.options.append)
  1858       self.options.num_days = int(not self.options.append)
  1789     basepath = self.args[0]
  1859     basepath = self.args[0]
  1790     appyaml = self._ParseAppYaml(basepath)
  1860     appyaml = self._ParseAppYaml(basepath)
  1792     logs_requester = LogsRequester(rpc_server, appyaml, self.args[1],
  1862     logs_requester = LogsRequester(rpc_server, appyaml, self.args[1],
  1793                                    self.options.num_days,
  1863                                    self.options.num_days,
  1794                                    self.options.append,
  1864                                    self.options.append,
  1795                                    self.options.severity,
  1865                                    self.options.severity,
  1796                                    time.time(),
  1866                                    time.time(),
  1797                                    self.options.vhost)
  1867                                    self.options.vhost,
       
  1868                                    self.options.include_vhost)
  1798     logs_requester.DownloadLogs()
  1869     logs_requester.DownloadLogs()
  1799 
  1870 
  1800   def _RequestLogsOptions(self, parser):
  1871   def _RequestLogsOptions(self, parser):
  1801     """Adds request_logs-specific options to 'parser'.
  1872     """Adds request_logs-specific options to 'parser'.
  1802 
  1873 
  1803     Args:
  1874     Args:
  1804       parser: An instance of OptionsParser.
  1875       parser: An instance of OptionsParser.
  1805     """
  1876     """
  1806     parser.add_option("-n", "--num_days", type="int", dest="num_days",
  1877     parser.add_option('-n', '--num_days', type='int', dest='num_days',
  1807                       action="store", default=None,
  1878                       action='store', default=None,
  1808                       help="Number of days worth of log data to get. "
  1879                       help='Number of days worth of log data to get. '
  1809                       "The cut-off point is midnight UTC. "
  1880                       'The cut-off point is midnight UTC. '
  1810                       "Use 0 to get all available logs. "
  1881                       'Use 0 to get all available logs. '
  1811                       "Default is 1, unless --append is also given; "
  1882                       'Default is 1, unless --append is also given; '
  1812                       "then the default is 0.")
  1883                       'then the default is 0.')
  1813     parser.add_option("-a", "--append", dest="append",
  1884     parser.add_option('-a', '--append', dest='append',
  1814                       action="store_true", default=False,
  1885                       action='store_true', default=False,
  1815                       help="Append to existing file.")
  1886                       help='Append to existing file.')
  1816     parser.add_option("--severity", type="int", dest="severity",
  1887     parser.add_option('--severity', type='int', dest='severity',
  1817                       action="store", default=None,
  1888                       action='store', default=None,
  1818                       help="Severity of app-level log messages to get. "
  1889                       help='Severity of app-level log messages to get. '
  1819                       "The range is 0 (DEBUG) through 4 (CRITICAL). "
  1890                       'The range is 0 (DEBUG) through 4 (CRITICAL). '
  1820                       "If omitted, only request logs are returned.")
  1891                       'If omitted, only request logs are returned.')
  1821     parser.add_option("--vhost", type="string", dest="vhost",
  1892     parser.add_option('--vhost', type='string', dest='vhost',
  1822                       action="store", default=None,
  1893                       action='store', default=None,
  1823                       help="The virtual host of log messages to get. "
  1894                       help='The virtual host of log messages to get. '
  1824                       "If omitted, all log messages are returned.")
  1895                       'If omitted, all log messages are returned.')
       
  1896     parser.add_option('--include_vhost', dest='include_vhost',
       
  1897                       action='store_true', default=False,
       
  1898                       help='Include virtual host in log messages.')
  1825 
  1899 
  1826   def CronInfo(self, now=None, output=sys.stdout):
  1900   def CronInfo(self, now=None, output=sys.stdout):
  1827     """Displays information about cron definitions.
  1901     """Displays information about cron definitions.
  1828 
  1902 
  1829     Args:
  1903     Args:
  1830       now: used for testing.
  1904       now: used for testing.
  1831       output: Used for testing.
  1905       output: Used for testing.
  1832     """
  1906     """
  1833     if len(self.args) != 1:
  1907     if len(self.args) != 1:
  1834       self.parser.error("Expected a single <directory> argument.")
  1908       self.parser.error('Expected a single <directory> argument.')
  1835     if now is None:
  1909     if now is None:
  1836       now = datetime.datetime.now()
  1910       now = datetime.datetime.now()
  1837 
  1911 
  1838     basepath = self.args[0]
  1912     basepath = self.args[0]
  1839     cron_entries = self._ParseCronYaml(basepath)
  1913     cron_entries = self._ParseCronYaml(basepath)
  1840     if cron_entries and cron_entries.cron:
  1914     if cron_entries and cron_entries.cron:
  1841       for entry in cron_entries.cron:
  1915       for entry in cron_entries.cron:
  1842         description = entry.description
  1916         description = entry.description
  1843         if not description:
  1917         if not description:
  1844           description = "<no description>"
  1918           description = '<no description>'
  1845         print >>output, "\n%s:\nURL: %s\nSchedule: %s" % (description,
  1919         print >>output, '\n%s:\nURL: %s\nSchedule: %s' % (description,
  1846                                                           entry.url,
  1920                                                           entry.url,
  1847                                                           entry.schedule)
  1921                                                           entry.schedule)
  1848         schedule = groctimespecification.GrocTimeSpecification(entry.schedule)
  1922         schedule = groctimespecification.GrocTimeSpecification(entry.schedule)
  1849         matches = schedule.GetMatches(now, self.options.num_runs)
  1923         matches = schedule.GetMatches(now, self.options.num_runs)
  1850         for match in matches:
  1924         for match in matches:
  1851           print >>output, "%s, %s from now" % (
  1925           print >>output, '%s, %s from now' % (
  1852               match.strftime("%Y-%m-%d %H:%M:%S"), match - now)
  1926               match.strftime('%Y-%m-%d %H:%M:%S'), match - now)
  1853 
  1927 
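CronInfo() leans on groctimespecification to expand each schedule string into its next few run times. A hedged standalone sketch of that expansion, using an example schedule string:

import datetime
from google.appengine.cron import groctimespecification

now = datetime.datetime.now()
schedule = groctimespecification.GrocTimeSpecification('every 12 hours')
for match in schedule.GetMatches(now, 3):
  print '%s, %s from now' % (match.strftime('%Y-%m-%d %H:%M:%S'), match - now)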
  1854   def _CronInfoOptions(self, parser):
  1928   def _CronInfoOptions(self, parser):
  1855     """Adds cron_info-specific options to 'parser'.
  1929     """Adds cron_info-specific options to 'parser'.
  1856 
  1930 
  1857     Args:
  1931     Args:
  1858       parser: An instance of OptionsParser.
  1932       parser: An instance of OptionsParser.
  1859     """
  1933     """
  1860     parser.add_option("-n", "--num_runs", type="int", dest="num_runs",
  1934     parser.add_option('-n', '--num_runs', type='int', dest='num_runs',
  1861                       action="store", default=5,
  1935                       action='store', default=5,
   1862                       help="Number of runs of each cron job to display. "
   1936                       help='Number of runs of each cron job to display. '
   1863                       "Default is 5.")
   1937                       'Default is 5.')
  1864 
  1938 
  1865   def _CheckRequiredLoadOptions(self):
  1939   def _CheckRequiredLoadOptions(self):
  1866     """Checks that upload/download options are present."""
  1940     """Checks that upload/download options are present."""
  1867     for option in ["filename", "kind", "config_file"]:
  1941     for option in ['filename', 'kind', 'config_file']:
  1868       if getattr(self.options, option) is None:
  1942       if getattr(self.options, option) is None:
  1869         self.parser.error("Option '%s' is required." % option)
  1943         self.parser.error('Option \'%s\' is required.' % option)
  1870     if not self.options.url:
  1944     if not self.options.url:
  1871       self.parser.error("You must have google.appengine.ext.remote_api.handler "
  1945       self.parser.error('You must have google.appengine.ext.remote_api.handler '
  1872                         "assigned to an endpoint in app.yaml, or provide "
  1946                         'assigned to an endpoint in app.yaml, or provide '
  1873                         "the url of the handler via the 'url' option.")
  1947                         'the url of the handler via the \'url\' option.')
  1874 
  1948 
  1875   def InferRemoteApiUrl(self, appyaml):
  1949   def InferRemoteApiUrl(self, appyaml):
  1876     """Uses app.yaml to determine the remote_api endpoint.
  1950     """Uses app.yaml to determine the remote_api endpoint.
  1877 
  1951 
  1878     Args:
  1952     Args:
  1880 
  1954 
  1881     Returns:
  1955     Returns:
  1882       The url of the remote_api endpoint as a string, or None
  1956       The url of the remote_api endpoint as a string, or None
  1883     """
  1957     """
  1884     handlers = appyaml.handlers
  1958     handlers = appyaml.handlers
  1885     handler_suffix = "remote_api/handler.py"
  1959     handler_suffix = 'remote_api/handler.py'
  1886     app_id = appyaml.application
  1960     app_id = appyaml.application
  1887     for handler in handlers:
  1961     for handler in handlers:
  1888       if hasattr(handler, "script") and handler.script:
  1962       if hasattr(handler, 'script') and handler.script:
  1889         if handler.script.endswith(handler_suffix):
  1963         if handler.script.endswith(handler_suffix):
  1890           server = self.options.server
  1964           server = self.options.server
  1891           if server == "appengine.google.com":
  1965           if server == 'appengine.google.com':
  1892             return "http://%s.appspot.com%s" % (app_id, handler.url)
  1966             return 'http://%s.appspot.com%s' % (app_id, handler.url)
  1893           else:
  1967           else:
  1894             return "http://%s%s" % (server, handler.url)
  1968             return 'http://%s%s' % (server, handler.url)
  1895     return None
  1969     return None
  1896 
  1970 
  1897   def RunBulkloader(self, arg_dict):
  1971   def RunBulkloader(self, arg_dict):
  1898     """Invokes the bulkloader with the given keyword arguments.
  1972     """Invokes the bulkloader with the given keyword arguments.
  1899 
  1973 
  1901       arg_dict: Dictionary of arguments to pass to bulkloader.Run().
  1975       arg_dict: Dictionary of arguments to pass to bulkloader.Run().
  1902     """
  1976     """
  1903     try:
  1977     try:
  1904       import sqlite3
  1978       import sqlite3
  1905     except ImportError:
  1979     except ImportError:
  1906       logging.error("upload_data action requires SQLite3 and the python "
  1980       logging.error('upload_data action requires SQLite3 and the python '
  1907                     "sqlite3 module (included in python since 2.5).")
  1981                     'sqlite3 module (included in python since 2.5).')
  1908       sys.exit(1)
  1982       sys.exit(1)
  1909 
  1983 
  1910     sys.exit(bulkloader.Run(arg_dict))
  1984     sys.exit(bulkloader.Run(arg_dict))
  1911 
  1985 
  1912   def _SetupLoad(self):
  1986   def _SetupLoad(self):
  1913     """Performs common verification and set up for upload and download."""
  1987     """Performs common verification and set up for upload and download."""
  1914     if len(self.args) != 1:
  1988     if len(self.args) != 1:
  1915       self.parser.error("Expected <directory> argument.")
  1989       self.parser.error('Expected <directory> argument.')
  1916 
  1990 
  1917     basepath = self.args[0]
  1991     basepath = self.args[0]
  1918     appyaml = self._ParseAppYaml(basepath)
  1992     appyaml = self._ParseAppYaml(basepath)
  1919 
  1993 
  1920     self.options.app_id = appyaml.application
  1994     self.options.app_id = appyaml.application
  1925         self.options.url = url
  1999         self.options.url = url
  1926 
  2000 
  1927     self._CheckRequiredLoadOptions()
  2001     self._CheckRequiredLoadOptions()
  1928 
  2002 
  1929     if self.options.batch_size < 1:
  2003     if self.options.batch_size < 1:
  1930       self.parser.error("batch_size must be 1 or larger.")
  2004       self.parser.error('batch_size must be 1 or larger.')
  1931 
  2005 
  1932     if verbosity == 1:
  2006     if verbosity == 1:
  1933       logging.getLogger().setLevel(logging.INFO)
  2007       logging.getLogger().setLevel(logging.INFO)
  1934       self.options.debug = False
  2008       self.options.debug = False
  1935     else:
  2009     else:
  1937       self.options.debug = True
  2011       self.options.debug = True
  1938 
  2012 
  1939   def _MakeLoaderArgs(self):
  2013   def _MakeLoaderArgs(self):
  1940     return dict([(arg_name, getattr(self.options, arg_name, None)) for
  2014     return dict([(arg_name, getattr(self.options, arg_name, None)) for
  1941                  arg_name in (
  2015                  arg_name in (
  1942         "app_id",
  2016                      'app_id',
  1943         "url",
  2017                      'url',
  1944         "filename",
  2018                      'filename',
  1945         "batch_size",
  2019                      'batch_size',
  1946         "kind",
  2020                      'kind',
  1947         "num_threads",
  2021                      'num_threads',
  1948         "bandwidth_limit",
  2022                      'bandwidth_limit',
  1949         "rps_limit",
  2023                      'rps_limit',
  1950         "http_limit",
  2024                      'http_limit',
  1951         "db_filename",
  2025                      'db_filename',
  1952         "config_file",
  2026                      'config_file',
  1953         "auth_domain",
  2027                      'auth_domain',
  1954         "has_header",
  2028                      'has_header',
  1955         "loader_opts",
  2029                      'loader_opts',
  1956         "log_file",
  2030                      'log_file',
  1957         "passin",
  2031                      'passin',
  1958         "email",
  2032                      'email',
  1959         "debug",
  2033                      'debug',
  1960         "exporter_opts",
  2034                      'exporter_opts',
  1961         "result_db_filename",
  2035                      'result_db_filename',
  1962         )])
  2036                      )])
  1963 
  2037 
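As a rough illustration, the dictionary built by _MakeLoaderArgs simply mirrors the option names listed above, and any option the user never set arrives as None. The values below are hypothetical:

args = {
    'app_id': 'example-app',                        # from app.yaml
    'url': 'http://example-app.appspot.com/remote_api',
    'filename': 'greetings.csv',
    'kind': 'Greeting',
    'batch_size': 10,                               # parser default
    'num_threads': 10,                              # parser default
    'db_filename': None,                            # option left unset
    # ... remaining keys follow the tuple of names above ...
}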
  1964   def PerformDownload(self, run_fn=None):
  2038   def PerformDownload(self, run_fn=None):
  1965     """Performs a datastore download via the bulkloader.
  2039     """Performs a datastore download via the bulkloader.
  1966 
  2040 
  1967     Args:
  2041     Args:
  1969     """
  2043     """
  1970     if run_fn is None:
  2044     if run_fn is None:
  1971       run_fn = self.RunBulkloader
  2045       run_fn = self.RunBulkloader
  1972     self._SetupLoad()
  2046     self._SetupLoad()
  1973 
  2047 
  1974     StatusUpdate("Downloading data records.")
  2048     StatusUpdate('Downloading data records.')
  1975 
  2049 
  1976     args = self._MakeLoaderArgs()
  2050     args = self._MakeLoaderArgs()
  1977     args['download'] = True
  2051     args['download'] = True
  1978     args['has_header'] = False
  2052     args['has_header'] = False
  1979 
  2053 
  1987     """
  2061     """
  1988     if run_fn is None:
  2062     if run_fn is None:
  1989       run_fn = self.RunBulkloader
  2063       run_fn = self.RunBulkloader
  1990     self._SetupLoad()
  2064     self._SetupLoad()
  1991 
  2065 
  1992     StatusUpdate("Uploading data records.")
  2066     StatusUpdate('Uploading data records.')
  1993 
  2067 
  1994     args = self._MakeLoaderArgs()
  2068     args = self._MakeLoaderArgs()
  1995     args['download'] = False
  2069     args['download'] = False
  1996 
  2070 
  1997     run_fn(args)
  2071     run_fn(args)
  2000     """Adds options common to 'upload_data' and 'download_data'.
  2074     """Adds options common to 'upload_data' and 'download_data'.
  2001 
  2075 
  2002     Args:
  2076     Args:
  2003       parser: An instance of OptionsParser.
  2077       parser: An instance of OptionsParser.
  2004     """
  2078     """
  2005     parser.add_option("--filename", type="string", dest="filename",
  2079     parser.add_option('--filename', type='string', dest='filename',
  2006                       action="store",
  2080                       action='store',
  2007                       help="The name of the file containing the input data."
  2081                       help='The name of the file containing the input data.'
  2008                       " (Required)")
  2082                       ' (Required)')
  2009     parser.add_option("--config_file", type="string", dest="config_file",
  2083     parser.add_option('--config_file', type='string', dest='config_file',
  2010                       action="store",
  2084                       action='store',
  2011                       help="Name of the configuration file. (Required)")
  2085                       help='Name of the configuration file. (Required)')
  2012     parser.add_option("--kind", type="string", dest="kind",
  2086     parser.add_option('--kind', type='string', dest='kind',
  2013                       action="store",
  2087                       action='store',
  2014                       help="The kind of the entities to store. (Required)")
  2088                       help='The kind of the entities to store. (Required)')
  2015     parser.add_option("--url", type="string", dest="url",
  2089     parser.add_option('--url', type='string', dest='url',
  2016                       action="store",
  2090                       action='store',
  2017                       help="The location of the remote_api endpoint.")
  2091                       help='The location of the remote_api endpoint.')
  2018     parser.add_option("--num_threads", type="int", dest="num_threads",
  2092     parser.add_option('--num_threads', type='int', dest='num_threads',
  2019                       action="store", default=10,
  2093                       action='store', default=10,
  2020                       help="Number of threads to upload records with.")
  2094                       help='Number of threads to upload records with.')
  2021     parser.add_option("--batch_size", type="int", dest="batch_size",
  2095     parser.add_option('--batch_size', type='int', dest='batch_size',
  2022                       action="store", default=10,
  2096                       action='store', default=10,
  2023                       help="Number of records to post in each request.")
  2097                       help='Number of records to post in each request.')
  2024     parser.add_option("--bandwidth_limit", type="int", dest="bandwidth_limit",
  2098     parser.add_option('--bandwidth_limit', type='int', dest='bandwidth_limit',
  2025                       action="store", default=250000,
  2099                       action='store', default=250000,
  2026                       help="The maximum bytes/second bandwidth for transfers.")
  2100                       help='The maximum bytes/second bandwidth for transfers.')
  2027     parser.add_option("--rps_limit", type="int", dest="rps_limit",
  2101     parser.add_option('--rps_limit', type='int', dest='rps_limit',
  2028                       action="store", default=20,
  2102                       action='store', default=20,
  2029                       help="The maximum records/second for transfers.")
  2103                       help='The maximum records/second for transfers.')
  2030     parser.add_option("--http_limit", type="int", dest="http_limit",
  2104     parser.add_option('--http_limit', type='int', dest='http_limit',
  2031                       action="store", default=8,
  2105                       action='store', default=8,
  2032                       help="The maximum requests/second for transfers.")
  2106                       help='The maximum requests/second for transfers.')
  2033     parser.add_option("--db_filename", type="string", dest="db_filename",
  2107     parser.add_option('--db_filename', type='string', dest='db_filename',
  2034                       action="store",
  2108                       action='store',
  2035                       help="Name of the progress database file.")
  2109                       help='Name of the progress database file.')
  2036     parser.add_option("--auth_domain", type="string", dest="auth_domain",
  2110     parser.add_option('--auth_domain', type='string', dest='auth_domain',
  2037                       action="store", default="gmail.com",
  2111                       action='store', default='gmail.com',
  2038                       help="The name of the authorization domain to use.")
  2112                       help='The name of the authorization domain to use.')
  2039     parser.add_option("--log_file", type="string", dest="log_file",
  2113     parser.add_option('--log_file', type='string', dest='log_file',
  2040                       help="File to write bulkloader logs.  If not supplied "
  2114                       help='File to write bulkloader logs.  If not supplied '
  2041                            "then a new log file will be created, named: "
  2115                       'then a new log file will be created, named: '
  2042                            "bulkloader-log-TIMESTAMP.")
  2116                       'bulkloader-log-TIMESTAMP.')
  2043 
  2117 
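A hedged sketch of how these shared flags might be exercised from a script; the directory, kind, and file names are placeholders, and it assumes appcfg.py is on the PATH:

import subprocess

subprocess.call(['appcfg.py', 'upload_data',
                 '--config_file=loader.py',   # Loader/Exporter definitions
                 '--filename=greetings.csv',  # input data
                 '--kind=Greeting',           # datastore kind to create
                 '--batch_size=50',           # entities per POST
                 '--rps_limit=20',            # records/second ceiling
                 'myapp/'])                   # directory holding app.yaml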
  2044   def _PerformUploadOptions(self, parser):
  2118   def _PerformUploadOptions(self, parser):
  2045     """Adds 'upload_data' specific options to the 'parser' passed in.
  2119     """Adds 'upload_data' specific options to the 'parser' passed in.
  2046 
  2120 
  2047     Args:
  2121     Args:
  2048       parser: An instance of OptionsParser.
  2122       parser: An instance of OptionsParser.
  2049     """
  2123     """
  2050     self._PerformLoadOptions(parser)
  2124     self._PerformLoadOptions(parser)
  2051     parser.add_option("--has_header", dest="has_header",
  2125     parser.add_option('--has_header', dest='has_header',
  2052                       action="store_true", default=False,
  2126                       action='store_true', default=False,
  2053                       help="Whether the first line of the input file should be"
  2127                       help='Whether the first line of the input file should be'
  2054                       " skipped")
  2128                       ' skipped')
  2055     parser.add_option("--loader_opts", type="string", dest="loader_opts",
  2129     parser.add_option('--loader_opts', type='string', dest='loader_opts',
  2056                       help="A string to pass to the Loader.initialize method.")
  2130                       help='A string to pass to the Loader.initialize method.')
  2057 
  2131 
  2058   def _PerformDownloadOptions(self, parser):
  2132   def _PerformDownloadOptions(self, parser):
  2059     """Adds 'download_data' specific options to the 'parser' passed in.
  2133     """Adds 'download_data' specific options to the 'parser' passed in.
  2060 
  2134 
  2061     Args:
  2135     Args:
  2062       parser: An instance of OptionsParser.
  2136       parser: An instance of OptionsParser.
  2063     """
  2137     """
  2064     self._PerformLoadOptions(parser)
  2138     self._PerformLoadOptions(parser)
  2065     parser.add_option("--exporter_opts", type="string", dest="exporter_opts",
  2139     parser.add_option('--exporter_opts', type='string', dest='exporter_opts',
  2066                       help="A string to pass to the Exporter.initialize method."
  2140                       help='A string to pass to the Exporter.initialize method.'
  2067                       )
  2141                      )
  2068     parser.add_option("--result_db_filename", type="string",
  2142     parser.add_option('--result_db_filename', type='string',
  2069                       dest="result_db_filename",
  2143                       dest='result_db_filename',
  2070                       action="store",
  2144                       action='store',
  2071                       help="Database to write entities to for download.")
  2145                       help='Database to write entities to for download.')
  2072 
  2146 
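Similarly for downloads, a hypothetical run writing entities both to a CSV file and to a result database (all names illustrative):

import subprocess

subprocess.call(['appcfg.py', 'download_data',
                 '--config_file=loader.py',
                 '--filename=greetings.csv',          # CSV output
                 '--result_db_filename=results.db',   # sqlite3 entity dump
                 '--exporter_opts=utf-8',             # passed to Exporter.initialize
                 '--kind=Greeting',
                 'myapp/'])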
  2073   class Action(object):
  2147   class Action(object):
  2074     """Contains information about a command line action.
  2148     """Contains information about a command line action.
  2075 
  2149 
  2076     Attributes:
  2150     Attributes:
  2082         formatting will be preserved.
  2156         formatting will be preserved.
  2083       options: A function that will add extra options to a given OptionParser
  2157       options: A function that will add extra options to a given OptionParser
  2084         object.
  2158         object.
  2085     """
  2159     """
  2086 
  2160 
  2087     def __init__(self, function, usage, short_desc, long_desc="",
  2161     def __init__(self, function, usage, short_desc, long_desc='',
  2088                  options=lambda obj, parser: None):
  2162                  options=lambda obj, parser: None):
  2089       """Initializer for the class attributes."""
  2163       """Initializer for the class attributes."""
  2090       self.function = function
  2164       self.function = function
  2091       self.usage = usage
  2165       self.usage = usage
  2092       self.short_desc = short_desc
  2166       self.short_desc = short_desc
  2095 
  2169 
  2096     def __call__(self, appcfg):
  2170     def __call__(self, appcfg):
  2097       """Invoke this Action on the specified AppCfg.
  2171       """Invoke this Action on the specified AppCfg.
  2098 
  2172 
  2099       This calls the function of the appropriate name on AppCfg, and
  2173       This calls the function of the appropriate name on AppCfg, and
  2100       respects polymorphic overrides."""
  2174       respects polymorphic overrides.
       
  2175 
       
  2176       Args:
       
  2177         appcfg: The appcfg to use.
       
  2178       Returns:
       
  2179         The result of the function call.
       
  2180       """
  2101       method = getattr(appcfg, self.function)
  2181       method = getattr(appcfg, self.function)
  2102       return method()
  2182       return method()
  2103 
  2183 
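The dispatch above is plain getattr-by-name, which is what lets AppCfg subclasses override individual commands. A self-contained sketch of the same pattern (the class and method names here are stand-ins, not part of the changeset):

class _Action(object):
  """Simplified stand-in for AppCfgApp.Action (illustrative only)."""

  def __init__(self, function):
    self.function = function

  def __call__(self, app):
    # Resolve the method by name on the object passed in, so subclass
    # overrides of individual commands are respected.
    return getattr(app, self.function)()

class _FakeAppCfg(object):
  def Update(self):
    return 'updated'

print _Action('Update')(_FakeAppCfg())   # prints: updated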
  2104   actions = {
  2184   actions = {
  2105 
  2185 
  2106       "help": Action(
  2186       'help': Action(
  2107           function="Help",
  2187           function='Help',
  2108           usage="%prog help <action>",
  2188           usage='%prog help <action>',
  2109           short_desc="Print help for a specific action."),
  2189           short_desc='Print help for a specific action.'),
  2110 
  2190 
  2111       "update": Action(
  2191       'update': Action(
  2112           function="Update",
  2192           function='Update',
  2113           usage="%prog [options] update <directory>",
  2193           usage='%prog [options] update <directory>',
  2114           options=_UpdateOptions,
  2194           options=_UpdateOptions,
  2115           short_desc="Create or update an app version.",
  2195           short_desc='Create or update an app version.',
  2116           long_desc="""
  2196           long_desc="""
  2117 Specify a directory that contains all of the files required by
  2197 Specify a directory that contains all of the files required by
  2118 the app, and appcfg.py will create/update the app version referenced
  2198 the app, and appcfg.py will create/update the app version referenced
  2119 in the app.yaml file at the top level of that directory.  appcfg.py
  2199 in the app.yaml file at the top level of that directory.  appcfg.py
  2120 will follow symlinks and recursively upload all files to the server.
  2200 will follow symlinks and recursively upload all files to the server.
  2121 Temporary or source control files (e.g. foo~, .svn/*) will be skipped."""),
  2201 Temporary or source control files (e.g. foo~, .svn/*) will be skipped."""),
  2122 
  2202 
  2123       "update_cron": Action(
  2203       'update_cron': Action(
  2124           function="UpdateCron",
  2204           function='UpdateCron',
  2125           usage="%prog [options] update_cron <directory>",
  2205           usage='%prog [options] update_cron <directory>',
  2126           short_desc="Update application cron definitions.",
  2206           short_desc='Update application cron definitions.',
  2127           long_desc="""
  2207           long_desc="""
  2128 The 'update_cron' command will update any new, removed or changed cron
  2208 The 'update_cron' command will update any new, removed or changed cron
  2129 definitions from the cron.yaml file."""),
  2209 definitions from the optional cron.yaml file."""),
  2130 
  2210 
  2131       "update_indexes": Action(
  2211       'update_indexes': Action(
  2132           function="UpdateIndexes",
  2212           function='UpdateIndexes',
  2133           usage="%prog [options] update_indexes <directory>",
  2213           usage='%prog [options] update_indexes <directory>',
  2134           short_desc="Update application indexes.",
  2214           short_desc='Update application indexes.',
  2135           long_desc="""
  2215           long_desc="""
  2136 The 'update_indexes' command will add additional indexes which are not currently
  2216 The 'update_indexes' command will add additional indexes which are not currently
  2137 in production as well as restart any indexes that were not completed."""),
  2217 in production as well as restart any indexes that were not completed."""),
  2138 
  2218 
  2139       "vacuum_indexes": Action(
  2219       'update_queues': Action(
  2140           function="VacuumIndexes",
  2220           function='UpdateQueues',
  2141           usage="%prog [options] vacuum_indexes <directory>",
  2221           usage='%prog [options] update_queues <directory>',
       
  2222           short_desc='Update application task queue definitions.',
       
  2223           long_desc="""
       
  2224 The 'update_queues' command will update any new, removed or changed task queue
       
  2225 definitions from the optional queue.yaml file."""),
       
  2226 
       
  2227       'vacuum_indexes': Action(
       
  2228           function='VacuumIndexes',
       
  2229           usage='%prog [options] vacuum_indexes <directory>',
  2142           options=_VacuumIndexesOptions,
  2230           options=_VacuumIndexesOptions,
  2143           short_desc="Delete unused indexes from application.",
  2231           short_desc='Delete unused indexes from application.',
  2144           long_desc="""
  2232           long_desc="""
  2145 The 'vacuum_indexes' command will help clean up indexes which are no longer
  2233 The 'vacuum_indexes' command will help clean up indexes which are no longer
  2146 in use.  It does this by comparing the local index configuration with
  2234 in use.  It does this by comparing the local index configuration with
  2147 indexes that are actually defined on the server.  If any indexes on the
  2235 indexes that are actually defined on the server.  If any indexes on the
  2148 server do not exist in the index configuration file, the user is given the
  2236 server do not exist in the index configuration file, the user is given the
  2149 option to delete them."""),
  2237 option to delete them."""),
  2150 
  2238 
  2151       "rollback": Action(
  2239       'rollback': Action(
  2152           function="Rollback",
  2240           function='Rollback',
  2153           usage="%prog [options] rollback <directory>",
  2241           usage='%prog [options] rollback <directory>',
  2154           short_desc="Rollback an in-progress update.",
  2242           short_desc='Rollback an in-progress update.',
  2155           long_desc="""
  2243           long_desc="""
  2156 The 'update' command requires a server-side transaction.  Use 'rollback'
  2244 The 'update' command requires a server-side transaction.  Use 'rollback'
  2157 if you get an error message about another transaction being in progress
  2245 if you get an error message about another transaction being in progress
  2158 and you are sure that there is no such transaction."""),
  2246 and you are sure that there is no such transaction."""),
  2159 
  2247 
  2160       "request_logs": Action(
  2248       'request_logs': Action(
  2161           function="RequestLogs",
  2249           function='RequestLogs',
  2162           usage="%prog [options] request_logs <directory> <output_file>",
  2250           usage='%prog [options] request_logs <directory> <output_file>',
  2163           options=_RequestLogsOptions,
  2251           options=_RequestLogsOptions,
  2164           short_desc="Write request logs in Apache common log format.",
  2252           short_desc='Write request logs in Apache common log format.',
  2165           long_desc="""
  2253           long_desc="""
  2166 The 'request_logs' command exports the request logs from your application
  2254 The 'request_logs' command exports the request logs from your application
  2167 to a file.  It will write Apache common log format records ordered
  2255 to a file.  It will write Apache common log format records ordered
  2168 chronologically.  If the output file is '-', the logs will be written to stdout."""),
  2256 chronologically.  If the output file is '-', the logs will be written to stdout."""),
  2169 
  2257 
  2170       "cron_info": Action(
  2258       'cron_info': Action(
  2171           function="CronInfo",
  2259           function='CronInfo',
  2172           usage="%prog [options] cron_info <directory>",
  2260           usage='%prog [options] cron_info <directory>',
  2173           options=_CronInfoOptions,
  2261           options=_CronInfoOptions,
  2174           short_desc="Display information about cron jobs.",
  2262           short_desc='Display information about cron jobs.',
  2175           long_desc="""
  2263           long_desc="""
  2176 The 'cron_info' command will display the next 'number' runs (default 5) for
  2264 The 'cron_info' command will display the next 'number' runs (default 5) for
  2177 each cron job defined in the cron.yaml file."""),
  2265 each cron job defined in the cron.yaml file."""),
  2178 
  2266 
  2179       "upload_data": Action(
  2267       'upload_data': Action(
  2180           function="PerformUpload",
  2268           function='PerformUpload',
  2181           usage="%prog [options] upload_data <directory>",
  2269           usage='%prog [options] upload_data <directory>',
  2182           options=_PerformUploadOptions,
  2270           options=_PerformUploadOptions,
  2183           short_desc="Upload data records to datastore.",
  2271           short_desc='Upload data records to datastore.',
  2184           long_desc="""
  2272           long_desc="""
  2185 The 'upload_data' command translates input records into datastore entities and
  2273 The 'upload_data' command translates input records into datastore entities and
  2186 uploads them into your application's datastore."""),
  2274 uploads them into your application's datastore."""),
  2187 
  2275 
  2188       "download_data": Action(
  2276       'download_data': Action(
  2189           function="PerformDownload",
  2277           function='PerformDownload',
  2190           usage="%prog [options] download_data <directory>",
  2278           usage='%prog [options] download_data <directory>',
  2191           options=_PerformDownloadOptions,
  2279           options=_PerformDownloadOptions,
  2192           short_desc="Download entities from datastore.",
  2280           short_desc='Download entities from datastore.',
  2193           long_desc="""
  2281           long_desc="""
  2194 The 'download_data' command downloads datastore entities and writes them to
  2282 The 'download_data' command downloads datastore entities and writes them to
  2195 a file in CSV or a developer-defined format."""),
  2283 a file in CSV or a developer-defined format."""),
  2196 
  2284 
  2197 
  2285 
  2198 
  2286 
  2199   }
  2287   }
  2200 
  2288 
  2201 
  2289 
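The actions table above drives both command dispatch and the help output. For the newly added 'update_queues' entry, a hypothetical minimal queue.yaml placed next to app.yaml and the corresponding invocations might look like this (file contents and directory name are illustrative, and appcfg.py is assumed to be on the PATH):

import subprocess

# queue.yaml (hypothetical):
#
#   queue:
#   - name: mail-queue
#     rate: 5/s
#
subprocess.call(['appcfg.py', 'update_queues', 'myapp/'])
subprocess.call(['appcfg.py', 'update_cron', 'myapp/'])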
  2202 def main(argv):
  2290 def main(argv):
  2203   logging.basicConfig(format=("%(asctime)s %(levelname)s %(filename)s:"
  2291   logging.basicConfig(format=('%(asctime)s %(levelname)s %(filename)s:'
  2204                               "%(lineno)s %(message)s "))
  2292                               '%(lineno)s %(message)s '))
  2205   try:
  2293   try:
  2206     result = AppCfgApp(argv).Run()
  2294     result = AppCfgApp(argv).Run()
  2207     if result:
  2295     if result:
  2208       sys.exit(result)
  2296       sys.exit(result)
  2209   except KeyboardInterrupt:
  2297   except KeyboardInterrupt:
  2210     StatusUpdate("Interrupted.")
  2298     StatusUpdate('Interrupted.')
  2211     sys.exit(1)
  2299     sys.exit(1)
  2212 
  2300 
  2213 
  2301 
  2214 if __name__ == "__main__":
  2302 if __name__ == '__main__':
  2215   main(sys.argv)
  2303   main(sys.argv)