|
1 #!/usr/bin/env python |
|
2 # |
|
3 # Copyright 2007 Google Inc. |
|
4 # |
|
5 # Licensed under the Apache License, Version 2.0 (the "License"); |
|
6 # you may not use this file except in compliance with the License. |
|
7 # You may obtain a copy of the License at |
|
8 # |
|
9 # http://www.apache.org/licenses/LICENSE-2.0 |
|
10 # |
|
11 # Unless required by applicable law or agreed to in writing, software |
|
12 # distributed under the License is distributed on an "AS IS" BASIS, |
|
13 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
|
14 # See the License for the specific language governing permissions and |
|
15 # limitations under the License. |
|
16 # |
|
17 |
|
18 """Tool for deploying apps to an app server. |
|
19 |
|
20 Currently, the application only uploads new appversions. To do this, it first |
|
21 walks the directory tree rooted at the path the user specifies, adding all the |
|
22 files it finds to a list. It then uploads the application configuration |
|
23 (app.yaml) to the server using HTTP, followed by uploading each of the files. |
|
24 It then commits the transaction with another request. |
|
25 |
|
26 The bulk of this work is handled by the AppVersionUpload class, which exposes |
|
27 methods to add to the list of files, fetch a list of modified files, upload |
|
28 files, and commit or rollback the transaction. |
|
29 """ |
|
30 |
|
31 |
|
32 import cookielib |
|
33 import datetime |
|
34 import getpass |
|
35 import logging |
|
36 import mimetypes |
|
37 import optparse |
|
38 import os |
|
39 import re |
|
40 import sha |
|
41 import socket |
|
42 import sys |
|
43 import tempfile |
|
44 import time |
|
45 import urllib |
|
46 import urllib2 |
|
47 |
|
48 import google |
|
49 from google.appengine.api import appinfo |
|
50 from google.appengine.api import validation |
|
51 from google.appengine.api import yaml_errors |
|
52 from google.appengine.api import yaml_object |
|
53 from google.appengine.datastore import datastore_index |
|
54 import yaml |
|
55 |
|
56 |
|
# Maximum number of files listed per request to the clone endpoints.
MAX_FILES_TO_CLONE = 100
# Delimiters used to serialize file tuples in clone request bodies
# (see BuildClonePostBody).
LIST_DELIMITER = "\n"
TUPLE_DELIMITER = "|"

# Location of the SDK VERSION file, relative to the 'google' package directory.
VERSION_FILE = "../VERSION"

# Socket timeout (seconds) for the /api/updatecheck request.
UPDATE_CHECK_TIMEOUT = 3

# Per-user file (in the home directory) storing nag/opt-in preferences.
NAG_FILE = ".appcfg_nag"

MAX_LOG_LEVEL = 4

# Global output verbosity; StatusUpdate() prints only when > 0.
verbosity = 1
|
70 |
|
71 |
|
def StatusUpdate(msg):
  """Write a status message to stderr.

  The message is emitted only when the module-level 'verbosity' is
  greater than 0; otherwise the call is a no-op.

  Args:
    msg: The string to print.
  """
  if verbosity <= 0:
    return
  print >>sys.stderr, msg
|
82 |
|
83 |
|
class ClientLoginError(urllib2.HTTPError):
  """Raised to indicate there was an error authenticating with ClientLogin."""

  def __init__(self, url, code, msg, headers, args):
    """Initializes a ClientLoginError.

    Args:
      url: The URL of the failed ClientLogin request.
      code: The HTTP status code of the response.
      msg: The HTTP status message.
      headers: The response headers.
      args: A dict of key/value pairs parsed from the ClientLogin response
        body; must contain an "Error" entry naming the failure reason.
    """
    urllib2.HTTPError.__init__(self, url, code, msg, headers, None)
    self.args = args
    # The ClientLogin failure reason, e.g. "BadAuthentication" (see the
    # reason checks in AbstractRpcServer._Authenticate).
    self.reason = args["Error"]
|
91 |
|
92 |
|
93 class AbstractRpcServer(object): |
|
94 """Provides a common interface for a simple RPC server.""" |
|
95 |
|
96 def __init__(self, host, auth_function, host_override=None, |
|
97 extra_headers=None, save_cookies=False): |
|
98 """Creates a new HttpRpcServer. |
|
99 |
|
100 Args: |
|
101 host: The host to send requests to. |
|
102 auth_function: A function that takes no arguments and returns an |
|
103 (email, password) tuple when called. Will be called if authentication |
|
104 is required. |
|
105 host_override: The host header to send to the server (defaults to host). |
|
106 extra_headers: A dict of extra headers to append to every request. Values |
|
107 supplied here will override other default headers that are supplied. |
|
108 save_cookies: If True, save the authentication cookies to local disk. |
|
109 If False, use an in-memory cookiejar instead. Subclasses must |
|
110 implement this functionality. Defaults to False. |
|
111 """ |
|
112 self.host = host |
|
113 self.host_override = host_override |
|
114 self.auth_function = auth_function |
|
115 self.authenticated = False |
|
116 |
|
117 self.extra_headers = { |
|
118 "User-agent": GetUserAgent() |
|
119 } |
|
120 if extra_headers: |
|
121 self.extra_headers.update(extra_headers) |
|
122 |
|
123 self.save_cookies = save_cookies |
|
124 self.cookie_jar = cookielib.MozillaCookieJar() |
|
125 self.opener = self._GetOpener() |
|
126 if self.host_override: |
|
127 logging.info("Server: %s; Host: %s", self.host, self.host_override) |
|
128 else: |
|
129 logging.info("Server: %s", self.host) |
|
130 |
|
131 def _GetOpener(self): |
|
132 """Returns an OpenerDirector for making HTTP requests. |
|
133 |
|
134 Returns: |
|
135 A urllib2.OpenerDirector object. |
|
136 """ |
|
137 raise NotImplemented() |
|
138 |
|
139 def _CreateRequest(self, url, data=None): |
|
140 """Creates a new urllib request.""" |
|
141 logging.debug("Creating request for: '%s' with payload:\n%s", url, data) |
|
142 req = urllib2.Request(url, data=data) |
|
143 if self.host_override: |
|
144 req.add_header("Host", self.host_override) |
|
145 for key, value in self.extra_headers.iteritems(): |
|
146 req.add_header(key, value) |
|
147 return req |
|
148 |
|
149 def _GetAuthToken(self, email, password): |
|
150 """Uses ClientLogin to authenticate the user, returning an auth token. |
|
151 |
|
152 Args: |
|
153 email: The user's email address |
|
154 password: The user's password |
|
155 |
|
156 Raises: |
|
157 ClientLoginError: If there was an error authenticating with ClientLogin. |
|
158 HTTPError: If there was some other form of HTTP error. |
|
159 |
|
160 Returns: |
|
161 The authentication token returned by ClientLogin. |
|
162 """ |
|
163 req = self._CreateRequest( |
|
164 url="https://www.google.com/accounts/ClientLogin", |
|
165 data=urllib.urlencode({ |
|
166 "Email": email, |
|
167 "Passwd": password, |
|
168 "service": "ah", |
|
169 "source": "Google-appcfg-1.0", |
|
170 "accountType": "HOSTED_OR_GOOGLE" |
|
171 }) |
|
172 ) |
|
173 try: |
|
174 response = self.opener.open(req) |
|
175 response_body = response.read() |
|
176 response_dict = dict(x.split("=") |
|
177 for x in response_body.split("\n") if x) |
|
178 return response_dict["Auth"] |
|
179 except urllib2.HTTPError, e: |
|
180 if e.code == 403: |
|
181 body = e.read() |
|
182 response_dict = dict(x.split("=", 1) for x in body.split("\n") if x) |
|
183 raise ClientLoginError(req.get_full_url(), e.code, e.msg, |
|
184 e.headers, response_dict) |
|
185 else: |
|
186 raise |
|
187 |
|
188 def _GetAuthCookie(self, auth_token): |
|
189 """Fetches authentication cookies for an authentication token. |
|
190 |
|
191 Args: |
|
192 auth_token: The authentication token returned by ClientLogin. |
|
193 |
|
194 Raises: |
|
195 HTTPError: If there was an error fetching the authentication cookies. |
|
196 """ |
|
197 continue_location = "http://localhost/" |
|
198 args = {"continue": continue_location, "auth": auth_token} |
|
199 req = self._CreateRequest("http://%s/_ah/login?%s" % |
|
200 (self.host, urllib.urlencode(args))) |
|
201 try: |
|
202 response = self.opener.open(req) |
|
203 except urllib2.HTTPError, e: |
|
204 response = e |
|
205 if (response.code != 302 or |
|
206 response.info()["location"] != continue_location): |
|
207 raise urllib2.HTTPError(req.get_full_url(), response.code, response.msg, |
|
208 response.headers, response.fp) |
|
209 self.authenticated = True |
|
210 |
|
211 def _Authenticate(self): |
|
212 """Authenticates the user. |
|
213 |
|
214 The authentication process works as follows: |
|
215 1) We get a username and password from the user |
|
216 2) We use ClientLogin to obtain an AUTH token for the user |
|
217 (see http://code.google.com/apis/accounts/AuthForInstalledApps.html). |
|
218 3) We pass the auth token to /_ah/login on the server to obtain an |
|
219 authentication cookie. If login was successful, it tries to redirect |
|
220 us to the URL we provided. |
|
221 |
|
222 If we attempt to access the upload API without first obtaining an |
|
223 authentication cookie, it returns a 401 response and directs us to |
|
224 authenticate ourselves with ClientLogin. |
|
225 """ |
|
226 for i in range(3): |
|
227 credentials = self.auth_function() |
|
228 try: |
|
229 auth_token = self._GetAuthToken(credentials[0], credentials[1]) |
|
230 except ClientLoginError, e: |
|
231 if e.reason == "BadAuthentication": |
|
232 print >>sys.stderr, "Invalid username or password." |
|
233 continue |
|
234 if e.reason == "CaptchaRequired": |
|
235 print >>sys.stderr, ( |
|
236 "Please go to\n" |
|
237 "https://www.google.com/accounts/DisplayUnlockCaptcha\n" |
|
238 "and verify you are a human. Then try again.") |
|
239 break; |
|
240 if e.reason == "NotVerified": |
|
241 print >>sys.stderr, "Account not verified." |
|
242 break |
|
243 if e.reason == "TermsNotAgreed": |
|
244 print >>sys.stderr, "User has not agreed to TOS." |
|
245 break |
|
246 if e.reason == "AccountDeleted": |
|
247 print >>sys.stderr, "The user account has been deleted." |
|
248 break |
|
249 if e.reason == "AccountDisabled": |
|
250 print >>sys.stderr, "The user account has been disabled." |
|
251 break |
|
252 if e.reason == "ServiceDisabled": |
|
253 print >>sys.stderr, ("The user's access to the service has been " |
|
254 "disabled.") |
|
255 break |
|
256 if e.reason == "ServiceUnavailable": |
|
257 print >>sys.stderr, "The service is not available; try again later." |
|
258 break |
|
259 raise |
|
260 self._GetAuthCookie(auth_token) |
|
261 return |
|
262 |
|
263 def Send(self, request_path, payload="", |
|
264 content_type="application/octet-stream", |
|
265 timeout=None, |
|
266 **kwargs): |
|
267 """Sends an RPC and returns the response. |
|
268 |
|
269 Args: |
|
270 request_path: The path to send the request to, eg /api/appversion/create. |
|
271 payload: The body of the request, or None to send an empty request. |
|
272 content_type: The Content-Type header to use. |
|
273 timeout: timeout in seconds; default None i.e. no timeout. |
|
274 (Note: for large requests on OS X, the timeout doesn't work right.) |
|
275 kwargs: Any keyword arguments are converted into query string parameters. |
|
276 |
|
277 Returns: |
|
278 The response body, as a string. |
|
279 """ |
|
280 if not self.authenticated: |
|
281 self._Authenticate() |
|
282 |
|
283 old_timeout = socket.getdefaulttimeout() |
|
284 socket.setdefaulttimeout(timeout) |
|
285 try: |
|
286 tries = 0 |
|
287 while True: |
|
288 tries += 1 |
|
289 args = dict(kwargs) |
|
290 url = "http://%s%s?%s" % (self.host, request_path, |
|
291 urllib.urlencode(args)) |
|
292 req = self._CreateRequest(url=url, data=payload) |
|
293 req.add_header("Content-Type", content_type) |
|
294 req.add_header("X-appcfg-api-version", "1") |
|
295 try: |
|
296 f = self.opener.open(req) |
|
297 response = f.read() |
|
298 f.close() |
|
299 return response |
|
300 except urllib2.HTTPError, e: |
|
301 if tries > 3: |
|
302 raise |
|
303 elif e.code == 401: |
|
304 self._Authenticate() |
|
305 elif e.code >= 500 and e.code < 600: |
|
306 continue |
|
307 else: |
|
308 raise |
|
309 finally: |
|
310 socket.setdefaulttimeout(old_timeout) |
|
311 |
|
312 |
|
class HttpRpcServer(AbstractRpcServer):
  """Provides a simplified RPC-style interface for HTTP requests."""

  # Cookies are persisted here (when save_cookies is True) so subsequent
  # runs can skip re-authentication.
  DEFAULT_COOKIE_FILE_PATH = "~/.appcfg_cookies"

  def _Authenticate(self):
    """Save the cookie jar after authentication."""
    super(HttpRpcServer, self)._Authenticate()
    if self.cookie_jar.filename is not None and self.save_cookies:
      StatusUpdate("Saving authentication cookies to %s" %
                   self.cookie_jar.filename)
      self.cookie_jar.save()

  def _GetOpener(self):
    """Returns an OpenerDirector that supports cookies and ignores redirects.

    Note: no HTTPRedirectHandler is installed, so redirects surface as
    HTTPErrors (relied on by _GetAuthCookie).

    Returns:
      A urllib2.OpenerDirector object.
    """
    opener = urllib2.OpenerDirector()
    opener.add_handler(urllib2.ProxyHandler())
    opener.add_handler(urllib2.UnknownHandler())
    opener.add_handler(urllib2.HTTPHandler())
    opener.add_handler(urllib2.HTTPDefaultErrorHandler())
    opener.add_handler(urllib2.HTTPSHandler())
    opener.add_handler(urllib2.HTTPErrorProcessor())

    if self.save_cookies:
      self.cookie_jar.filename = os.path.expanduser(HttpRpcServer.DEFAULT_COOKIE_FILE_PATH)

      if os.path.exists(self.cookie_jar.filename):
        try:
          self.cookie_jar.load()
          self.authenticated = True
          StatusUpdate("Loaded authentication cookies from %s" %
                       self.cookie_jar.filename)
        except (OSError, IOError, cookielib.LoadError), e:
          # Unreadable/corrupt cookie file: fall back to in-memory cookies
          # by clearing the filename (best effort, not fatal).
          logging.debug("Could not load authentication cookies; %s: %s",
                        e.__class__.__name__, e)
          self.cookie_jar.filename = None
      else:
        try:
          # Pre-create the cookie file with owner-only permissions (0600)
          # since it will hold authentication credentials.
          fd = os.open(self.cookie_jar.filename, os.O_CREAT, 0600)
          os.close(fd)
        except (OSError, IOError), e:
          logging.debug("Could not create authentication cookies file; %s: %s",
                        e.__class__.__name__, e)
          self.cookie_jar.filename = None

    opener.add_handler(urllib2.HTTPCookieProcessor(self.cookie_jar))
    return opener
|
364 |
|
365 |
|
def GetMimeTypeIfStaticFile(config, filename):
  """Looks up the mime type for 'filename'.

  Uses the handlers in 'config' to determine if the file should
  be treated as a static file.

  Args:
    config: The app.yaml object to check the filename against.
    filename: The name of the file.

  Returns:
    The mime type string. For example, 'text/plain' or 'image/gif'.
    None if this is not a static file.
  """
  for handler in config.handlers:
    handler_type = handler.GetHandlerType()
    if handler_type not in ("static_dir", "static_files"):
      continue

    # Build the pattern the filename must match for this static handler.
    if handler_type == "static_dir":
      file_pattern = os.path.join(re.escape(handler.GetHandler()), ".*")
    else:
      file_pattern = handler.upload
    if not re.match(file_pattern, filename):
      continue

    # First matching static handler wins: explicit mime_type, then a
    # guess from the extension, then a generic binary type.
    if handler.mime_type is not None:
      return handler.mime_type
    guess = mimetypes.guess_type(filename)[0]
    if guess is not None:
      return guess
    default = "application/octet-stream"
    print >>sys.stderr, ("Could not guess mimetype for %s. Using %s."
                         % (filename, default))
    return default
  return None
|
399 |
|
400 |
|
def BuildClonePostBody(file_tuples):
  """Build the post body for the /api/clone{files,blobs} urls.

  Each tuple is serialized as its elements joined by TUPLE_DELIMITER, and
  the serialized tuples are joined by LIST_DELIMITER.

  Args:
    file_tuples: A list of tuples. Each tuple should contain the entries
      appropriate for the endpoint in question.

  Returns:
    A string containing the properly delimited tuples.
  """
  return LIST_DELIMITER.join(
      TUPLE_DELIMITER.join(tup) for tup in file_tuples)
|
417 |
|
418 |
|
class NagFile(validation.Validated):
  """A validated YAML class to represent the user's nag preferences.

  Attributes:
    timestamp: The timestamp of the last nag.
    opt_in: True if the user wants to check for updates on dev_appserver
      start. False if not. May be None if we have not asked the user yet.
  """

  # Schema for the validated YAML document stored in NAG_FILE.
  ATTRIBUTES = {
    "timestamp": validation.TYPE_FLOAT,
    "opt_in": validation.Optional(validation.TYPE_BOOL),
  }

  @staticmethod
  def Load(nag_file):
    """Load a single NagFile object where one and only one is expected.

    Args:
      nag_file: A file-like object or string containing the yaml data to parse.

    Returns:
      A NagFile instance.
    """
    return yaml_object.BuildSingleObject(NagFile, nag_file)
|
444 |
|
445 |
|
def GetVersionObject(isfile=os.path.isfile, open_fn=open):
  """Gets the version of the SDK by parsing the VERSION file.

  The VERSION file is located relative to the installed 'google' package
  (see VERSION_FILE).

  Args:
    isfile, open_fn: Used for testing.

  Returns:
    A Yaml object or None if the VERSION file does not exist.
  """
  version_path = os.path.join(os.path.dirname(google.__file__),
                              VERSION_FILE)
  if not isfile(version_path):
    logging.error("Could not find version file at %s", version_path)
    return None

  version_fh = open_fn(version_path, "r")
  try:
    return yaml.safe_load(version_fh)
  finally:
    version_fh.close()
|
468 |
|
469 |
|
class UpdateCheck(object):
  """Determines if the local SDK is the latest version.

  Nags the user when there are updates to the SDK. As the SDK becomes
  more out of date, the language in the nagging gets stronger. We
  store a little yaml file in the user's home directory so that we nag
  the user only once a week.

  The yaml file has the following field:
    'timestamp': Last time we nagged the user in seconds since the epoch.

  Attributes:
    server: An AbstractRpcServer instance used to check for the latest SDK.
    config: The app's AppInfoExternal. Needed to determine which api_version
      the app is using.
  """

  def __init__(self,
               server,
               config,
               isdir=os.path.isdir,
               isfile=os.path.isfile,
               open_fn=open):
    """Create a new UpdateCheck.

    Args:
      server: The AbstractRpcServer to use.
      config: The yaml object that specifies the configuration of this
        application.

    Args for testing:
      isdir: Replacement for os.path.isdir.
      isfile: Replacement for os.path.isfile.
      open: Replacement for the open builtin.
    """
    self.server = server
    self.config = config
    self.isdir = isdir
    self.isfile = isfile
    self.open = open_fn

  @staticmethod
  def MakeNagFilename():
    """Returns the filename for the nag file for this user."""
    user_homedir = os.path.expanduser("~/")
    if not os.path.isdir(user_homedir):
      # NOTE(review): when expanduser yields no real home directory, this
      # sets HOMEDRIVE from the drive of the stdlib's location so the
      # expanduser call below can resolve '~' — presumably a Windows
      # workaround; confirm before changing.
      drive, tail = os.path.splitdrive(os.__file__)
      if drive:
        os.environ["HOMEDRIVE"] = drive

    return os.path.expanduser("~/" + NAG_FILE)

  def _ParseVersionFile(self):
    """Parse the local VERSION file.

    Returns:
      A Yaml object or None if the file does not exist.
    """
    return GetVersionObject(isfile=self.isfile, open_fn=self.open)

  def CheckSupportedVersion(self):
    """Determines if the app's api_version is supported by the SDK.

    Uses the api_version field from the AppInfoExternal to determine if
    the SDK supports that api_version.

    Raises:
      SystemExit if the api_version is not supported.
    """
    version = self._ParseVersionFile()
    if version is None:
      logging.error("Could not determine if the SDK supports the api_version "
                    "requested in app.yaml.")
      return
    if self.config.api_version not in version["api_versions"]:
      logging.critical("The api_version specified in app.yaml (%s) is not "
                       "supported by this release of the SDK. The supported "
                       "api_versions are %s.",
                       self.config.api_version, version["api_versions"])
      sys.exit(1)

  def CheckForUpdates(self):
    """Queries the server for updates and nags the user if appropriate.

    Queries the server for the latest SDK version at the same time reporting
    the local SDK version. The server will respond with a yaml document
    containing the fields:
      "release": The name of the release (e.g. 1.2).
      "timestamp": The time the release was created (YYYY-MM-DD HH:MM AM/PM TZ).
      "api_versions": A list of api_version strings (e.g. ['1', 'beta']).

    We will nag the user with increasing severity if:
    - There is a new release.
    - There is a new release with a new api_version.
    - There is a new release that does not support the api_version named in
      self.config.
    """
    version = self._ParseVersionFile()
    if version is None:
      logging.info("Skipping update check")
      return
    logging.info("Checking for updates to the SDK.")

    try:
      response = self.server.Send("/api/updatecheck",
                                  timeout=UPDATE_CHECK_TIMEOUT,
                                  release=version["release"],
                                  timestamp=version["timestamp"],
                                  api_versions=version["api_versions"])
    except urllib2.URLError, e:
      # The update check is best-effort; never block the user on it.
      logging.info("Update check failed: %s", e)
      return

    latest = yaml.safe_load(response)
    if latest["release"] == version["release"]:
      logging.info("The SDK is up to date.")
      return

    api_versions = latest["api_versions"]
    if self.config.api_version not in api_versions:
      # Strongest nag: the app's api_version is gone from the latest SDK.
      self._Nag(
          "The api version you are using (%s) is obsolete! You should\n"
          "upgrade your SDK and test that your code works with the new\n"
          "api version." % self.config.api_version,
          latest, version, force=True)
      return

    if self.config.api_version != api_versions[len(api_versions) - 1]:
      # Still supported, but no longer the newest api_version.
      self._Nag(
          "The api version you are using (%s) is deprecated. You should\n"
          "upgrade your SDK to try the new functionality." %
          self.config.api_version, latest, version)
      return

    self._Nag("There is a new release of the SDK available.",
              latest, version)

  def _ParseNagFile(self):
    """Parses the nag file.

    Returns:
      A NagFile if the file was present else None.
    """
    nag_filename = UpdateCheck.MakeNagFilename()
    if self.isfile(nag_filename):
      fh = self.open(nag_filename, "r")
      try:
        nag = NagFile.Load(fh)
      finally:
        fh.close()
      return nag
    return None

  def _WriteNagFile(self, nag):
    """Writes the NagFile to the user's nag file.

    If the destination path does not exist, this method will log an error
    and fail silently.

    Args:
      nag: The NagFile to write.
    """
    nagfilename = UpdateCheck.MakeNagFilename()
    try:
      fh = self.open(nagfilename, "w")
      try:
        fh.write(nag.ToYAML())
      finally:
        fh.close()
    except (OSError, IOError), e:
      logging.error("Could not write nag file to %s. Error: %s", nagfilename, e)

  def _Nag(self, msg, latest, version, force=False):
    """Prints a nag message and updates the nag file's timestamp.

    Because we don't want to nag the user everytime, we store a simple
    yaml document in the user's home directory. If the timestamp in this
    doc is over a week old, we'll nag the user. And when we nag the user,
    we update the timestamp in this doc.

    Args:
      msg: The formatted message to print to the user.
      latest: The yaml document received from the server.
      version: The local yaml version document.
    force: If True, always nag the user, ignoring the nag file.
    """
    nag = self._ParseNagFile()
    if nag and not force:
      last_nag = datetime.datetime.fromtimestamp(nag.timestamp)
      if datetime.datetime.now() - last_nag < datetime.timedelta(weeks=1):
        logging.debug("Skipping nag message")
        return

    if nag is None:
      nag = NagFile()
    # Record this nag so the weekly throttle above applies next time.
    nag.timestamp = time.time()
    self._WriteNagFile(nag)

    print "****************************************************************"
    print msg
    print "-----------"
    print "Latest SDK:"
    print yaml.dump(latest)
    print "-----------"
    print "Your SDK:"
    print yaml.dump(version)
    print "-----------"
    print "Please visit http://code.google.com/appengine for the latest SDK"
    print "****************************************************************"

  def AllowedToCheckForUpdates(self, input_fn=raw_input):
    """Determines if the user wants to check for updates.

    On startup, the dev_appserver wants to check for updates to the SDK.
    Because this action reports usage to Google when the user is not
    otherwise communicating with Google (e.g. pushing a new app version),
    the user must opt in.

    If the user does not have a nag file, we will query the user and
    save the response in the nag file. Subsequent calls to this function
    will re-use that response.

    Returns:
      True if the user wants to check for updates. False otherwise.
    """
    nag = self._ParseNagFile()
    if nag is None:
      nag = NagFile()
      nag.timestamp = time.time()

    if nag.opt_in is None:
      # Anything other than an explicit "n"/"no" counts as opting in,
      # matching the (Y/n) prompt default.
      answer = input_fn("Allow dev_appserver to check for updates on startup? "
                        "(Y/n): ")
      answer = answer.strip().lower()
      if answer == "n" or answer == "no":
        print ("dev_appserver will not check for updates on startup. To "
               "change this setting, edit %s" % UpdateCheck.MakeNagFilename())
        nag.opt_in = False
      else:
        print ("dev_appserver will check for updates on startup. To change "
               "this setting, edit %s" % UpdateCheck.MakeNagFilename())
        nag.opt_in = True
      self._WriteNagFile(nag)
    return nag.opt_in
|
714 |
|
715 |
|
class IndexDefinitionUpload(object):
  """Provides facilities to upload index definitions to the hosting service."""

  def __init__(self, server, config, definitions):
    """Creates a new DatastoreIndexUpload.

    Args:
      server: The RPC server to use. Should be an instance of HttpRpcServer
        or TestRpcServer.
      config: The AppInfoExternal object derived from the app.yaml file.
      definitions: An IndexDefinitions object.
    """
    self.server = server
    self.config = config
    self.definitions = definitions

  def DoUpload(self):
    """Uploads the index definitions to the server as YAML."""
    StatusUpdate("Uploading index definitions.")
    self.server.Send("/api/datastore/index/add",
                     app_id=self.config.application,
                     version=self.config.version,
                     payload=self.definitions.ToYAML())
|
738 |
|
739 |
|
class IndexOperation(object):
  """Provide facilities for writing Index operation commands."""

  def __init__(self, server, config):
    """Creates a new IndexOperation.

    Args:
      server: The RPC server to use. Should be an instance of HttpRpcServer
        or TestRpcServer.
      config: appinfo.AppInfoExternal configuration object.
    """
    self.server = server
    self.config = config

  def DoDiff(self, definitions):
    """Retrieve diff file from the server.

    Args:
      definitions: datastore_index.IndexDefinitions as loaded from the user's
        index.yaml file.

    Returns:
      A pair of datastore_index.IndexDefinitions objects. The first record
      is the set of indexes that are present in the index.yaml file but missing
      from the server. The second record is the set of indexes that are
      present on the server but missing from the index.yaml file (indicating
      that these indexes should probably be vacuumed).
    """
    StatusUpdate("Fetching index definitions diff.")
    response = self.server.Send("/api/datastore/index/diff",
                                app_id=self.config.application,
                                payload=definitions.ToYAML())
    return datastore_index.ParseMultipleIndexDefinitions(response)

  def DoDelete(self, definitions):
    """Delete indexes from the server.

    Args:
      definitions: Index definitions to delete from datastore.

    Returns:
      A single datastore_index.IndexDefinitions containing indexes that were
      not deleted, probably because they were already removed. This may
      be normal behavior as there is a potential race condition between fetching
      the index-diff and sending deletion confirmation through.
    """
    StatusUpdate("Deleting selected index definitions.")
    response = self.server.Send("/api/datastore/index/delete",
                                app_id=self.config.application,
                                payload=definitions.ToYAML())
    return datastore_index.ParseIndexDefinitions(response)
|
791 |
|
792 |
|
class VacuumIndexesOperation(IndexOperation):
  """Provide facilities to request the deletion of datastore indexes."""

  def __init__(self, server, config, force,
               confirmation_fn=raw_input):
    """Creates a new VacuumIndexesOperation.

    Args:
      server: The RPC server to use. Should be an instance of HttpRpcServer
        or TestRpcServer.
      config: appinfo.AppInfoExternal configuration object.
      force: True to force deletion of indexes, else False.
      confirmation_fn: Function used for getting input from user.
    """
    super(VacuumIndexesOperation, self).__init__(server, config)
    self.force = force
    self.confirmation_fn = confirmation_fn

  def GetConfirmation(self, index):
    """Get confirmation from user to delete an index.

    This method will enter an input loop until the user provides a
    response it is expecting. Valid input is one of three responses:

      y: Confirm deletion of index.
      n: Do not delete index.
      a: Delete all indexes without asking for further confirmation.

    If the user enters nothing at all, the default action is to skip
    that index and do not delete.

    If the user selects 'a', as a side effect, the 'force' flag is set.

    Args:
      index: Index to confirm.

    Returns:
      True if user enters 'y' or 'a'. False if user enters 'n'.
    """
    while True:
      print "This index is no longer defined in your index.yaml file."
      print
      print index.ToYAML()
      print

      confirmation = self.confirmation_fn(
          "Are you sure you want to delete this index? (N/y/a): ")
      confirmation = confirmation.strip().lower()

      if confirmation == 'y':
        return True
      elif confirmation == 'n' or confirmation == '':
        return False
      elif confirmation == 'a':
        # 'a' confirms this index AND suppresses further prompts via force.
        self.force = True
        return True
      else:
        print "Did not understand your response."

  def DoVacuum(self, definitions):
    """Vacuum indexes in datastore.

    This method will query the server to determine which indexes are not
    being used according to the user's local index.yaml file. Once it has
    made this determination, it confirms with the user which unused indexes
    should be deleted. Once confirmation for each index is received, it
    deletes those indexes.

    Because another user may in theory delete the same indexes at the same
    time as the user, there is a potential race condition. In these rare cases,
    some of the indexes previously confirmed for deletion will not be found.
    The user is notified which indexes these were.

    Args:
      definitions: datastore_index.IndexDefinitions as loaded from the user's
        index.yaml file.
    """
    # Only the server-side surplus (unused_indexes) matters for vacuuming;
    # new_indexes (missing on the server) is intentionally ignored here.
    new_indexes, unused_indexes = self.DoDiff(definitions)

    deletions = datastore_index.IndexDefinitions(indexes=[])
    if unused_indexes.indexes is not None:
      for index in unused_indexes.indexes:
        if self.force or self.GetConfirmation(index):
          deletions.indexes.append(index)

    if len(deletions.indexes) > 0:
      not_deleted = self.DoDelete(deletions)

      # Notify the user about indexes that had already vanished server-side.
      if not_deleted.indexes:
        not_deleted_count = len(not_deleted.indexes)
        if not_deleted_count == 1:
          warning_message = ('An index was not deleted. Most likely this is '
                             'because it no longer exists.\n\n')
        else:
          warning_message = ('%d indexes were not deleted. Most likely this '
                             'is because they no longer exist.\n\n'
                             % not_deleted_count)
        for index in not_deleted.indexes:
          warning_message = warning_message + index.ToYAML()
        logging.warning(warning_message)
|
893 |
|
894 |
|
class LogsRequester(object):
  """Provide facilities to export request logs.

  Downloads request logs from the server in batches (newest first), then
  copies them to the output file in chronological order.
  """

  def __init__(self, server, config, output_file,
               num_days, append, severity, now):
    """Constructor.

    Args:
      server: The RPC server to use. Should be an instance of HttpRpcServer
        or TestRpcServer.
      config: appinfo.AppInfoExternal configuration object.
      output_file: Output file name.
      num_days: Number of days worth of logs to export; 0 for all available.
      append: True if appending to an existing file.
      severity: App log severity to request (0-4); None for no app logs.
      now: POSIX timestamp used for calculating valid dates for num_days.
    """
    self.server = server
    self.config = config
    self.output_file = output_file
    self.append = append
    self.num_days = num_days
    self.severity = severity
    # NOTE(review): version id is always the config version plus a ".1"
    # minor-version suffix here.
    self.version_id = self.config.version + ".1"
    self.sentinel = None
    self.write_mode = "w"
    if self.append:
      # In append mode, find the last previously-written log line so the
      # download can stop once already-saved records are reached.
      self.sentinel = FindSentinel(self.output_file)
      self.write_mode = "a"
    self.valid_dates = None
    if self.num_days:
      # Build a regex matching the Apache-style "[dd/mm/yyyy:" date prefix
      # for each of the requested days; lines outside this window stop the
      # download.
      patterns = []
      for i in xrange(self.num_days):
        then = time.gmtime(now - 24*3600 * i)
        patterns.append(re.escape(time.strftime("%d/%m/%Y", then)))
      self.valid_dates = re.compile(r"[^[]+\[(" + "|".join(patterns) + r"):")

  def DownloadLogs(self):
    """Download the requested logs.

    This will write the logs to the file designated by
    self.output_file, or to stdout if the filename is '-'.
    Multiple roundtrips to the server may be made.
    """
    StatusUpdate("Downloading request logs for %s %s." %
                 (self.config.application, self.version_id))
    # Records arrive newest-first; buffer them in a temp file, then copy
    # them reversed (oldest-first) into the real output.
    tf = tempfile.TemporaryFile()
    offset = None
    try:
      while True:
        try:
          offset = self.RequestLogLines(tf, offset)
          if not offset:
            break
        except KeyboardInterrupt:
          # Partial data is still useful; stop fetching but keep what we have.
          StatusUpdate("Keyboard interrupt; saving data downloaded so far.")
          break
      StatusUpdate("Copying request logs to %r." % self.output_file)
      if self.output_file == "-":
        of = sys.stdout
      else:
        try:
          of = open(self.output_file, self.write_mode)
        except IOError, err:
          StatusUpdate("Can't write %r: %s." % (self.output_file, err))
          sys.exit(1)
      try:
        line_count = CopyReversedLines(tf, of)
      finally:
        of.flush()
        if of is not sys.stdout:
          of.close()
    finally:
      tf.close()
    StatusUpdate("Copied %d records." % line_count)

  def RequestLogLines(self, tf, offset):
    """Make a single roundtrip to the server.

    Args:
      tf: Writable binary stream to which the log lines returned by
        the server are written, stripped of headers, and excluding
        lines skipped due to self.sentinel or self.valid_dates filtering.
      offset: Offset string for a continued request; None for the first.

    Returns:
      The offset string to be used for the next request, if another
      request should be issued; or None, if not.
    """
    logging.info("Request with offset %r.", offset)
    kwds = {'app_id': self.config.application,
            'version': self.version_id,
            'limit': 100,
            }
    if offset:
      kwds['offset'] = offset
    if self.severity is not None:
      kwds['severity'] = str(self.severity)
    response = self.server.Send("/api/request_logs", payload=None, **kwds)
    # The server separates sub-lines of one record with "\r"; encode them as
    # NULs so splitlines() keeps each record on a single line.
    response = response.replace("\r", "\0")
    lines = response.splitlines()
    logging.info("Received %d bytes, %d records.", len(response), len(lines))
    offset = None
    # A leading "#" header line may carry the continuation offset.
    if lines and lines[0].startswith('#'):
      match = re.match(r'^#\s*next_offset=(\S+)\s*$', lines[0])
      del lines[0]
      if match:
        offset = match.group(1)
    # A trailing "#" line is a footer; drop it.
    if lines and lines[-1].startswith('#'):
      del lines[-1]
    valid_dates = self.valid_dates
    sentinel = self.sentinel
    len_sentinel = None
    if sentinel:
      len_sentinel = len(sentinel)
    for line in lines:
      # Stop entirely when we reach a record already present in the output
      # file (sentinel match must end at a record boundary: end-of-line or a
      # NUL sub-line separator), or one older than the requested date window.
      if ((sentinel and
           line.startswith(sentinel) and
           line[len_sentinel : len_sentinel+1] in ("", "\0")) or
          (valid_dates and not valid_dates.match(line))):
        return None
      tf.write(line + '\n')
    if not lines:
      return None
    return offset
|
1020 |
|
1021 |
|
def CopyReversedLines(input, output, blocksize=2**16):
  r"""Copy lines from input stream to output stream in reverse order.

  As a special feature, null bytes in the input are turned into
  newlines followed by tabs in the output, but these "sub-lines"
  separated by null bytes are not reversed.  E.g. If the input is
  "A\0B\nC\0D\n", the output is "C\n\tD\nA\n\tB\n".

  Args:
    input: A seekable stream open for reading in binary mode.
    output: A stream open for writing; doesn't have to be seekable or binary.
    blocksize: Optional block size for buffering, for unit testing.

  Returns:
    The number of lines copied.
  """
  line_count = 0
  # Seek to the end to learn the total size, then walk the blocks backwards.
  input.seek(0, 2)
  last_block = input.tell() // blocksize
  # 'spillover' holds a line fragment whose beginning lies in the previous
  # (i.e. next-to-be-read) block.
  spillover = ""
  for iblock in xrange(last_block + 1, -1, -1):
    input.seek(iblock * blocksize)
    data = input.read(blocksize)
    lines = data.splitlines(True)
    # Glue this block's last (possibly partial) line onto the fragment
    # carried over from the block after it.
    lines[-1:] = "".join(lines[-1:] + [spillover]).splitlines(True)
    if lines and not lines[-1].endswith("\n"):
      # Ensure the final output line is newline-terminated.
      lines[-1] += "\n"
    lines.reverse()
    if lines and iblock > 0:
      # The block's first line may be incomplete; defer it to the next
      # (earlier) block as spillover.
      spillover = lines.pop()
    if lines:
      line_count += len(lines)
      # Expand NUL-separated sub-lines into "\n\t" continuations.
      data = "".join(lines).replace("\0", "\n\t")
      output.write(data)
  return line_count
|
1057 |
|
1058 |
|
def FindSentinel(filename, blocksize=2**16):
  """Return the sentinel line from the output file.

  Args:
    filename: The filename of the output file.  (We'll read this file.)
    blocksize: Optional block size for buffering, for unit testing.

  Returns:
    The contents of the last line in the file that doesn't start with
    a tab, with its trailing newline stripped; or None if the file
    couldn't be opened or no such line could be found by inspecting
    the last 'blocksize' bytes of the file.
  """
  if filename == "-":
    # stdout has no previous contents to resume from.
    StatusUpdate("Can't combine --append with output to stdout.")
    sys.exit(2)
  try:
    fp = open(filename, "rb")
  except IOError, err:
    StatusUpdate("Append mode disabled: can't read %r: %s." % (filename, err))
    return None
  try:
    # Only inspect the tail of the file: seek to at most 'blocksize' bytes
    # before the end.
    fp.seek(0, 2)
    fp.seek(max(0, fp.tell() - blocksize))
    lines = fp.readlines()
    # The first line read may be a partial line (we seeked into its middle);
    # discard it.
    del lines[:1]
    sentinel = None
    for line in lines:
      # Tab-prefixed lines are continuation sub-lines, not record starts.
      if not line.startswith("\t"):
        sentinel = line
    if not sentinel:
      StatusUpdate("Append mode disabled: can't find sentinel in %r." %
                   filename)
      return None
    return sentinel.rstrip("\n")
  finally:
    fp.close()
|
1096 |
|
1097 |
|
class AppVersionUpload(object):
  """Provides facilities to upload a new appversion to the hosting service.

  Attributes:
    server: The AbstractRpcServer to use for the upload.
    config: The AppInfoExternal object derived from the app.yaml file.
    app_id: The application string from 'config'.
    version: The version string from 'config'.
    files: A dictionary of files to upload to the server, mapping path to
      hash of the file contents.
    in_transaction: True iff a transaction with the server has started.
      An AppVersionUpload can do only one transaction at a time.
  """

  def __init__(self, server, config):
    """Creates a new AppVersionUpload.

    Args:
      server: The RPC server to use. Should be an instance of HttpRpcServer or
        TestRpcServer.
      config: An AppInfoExternal object that specifies the configuration for
        this application.
    """
    self.server = server
    self.config = config
    self.app_id = self.config.application
    self.version = self.config.version
    self.files = {}
    self.in_transaction = False

  def _Hash(self, content):
    """Compute the hash of the content.

    Args:
      content: The data to hash as a string.

    Returns:
      The string representation of the hash: a SHA-1 hex digest split into
      five underscore-separated 8-character groups.
    """
    h = sha.new(content).hexdigest()
    return '%s_%s_%s_%s_%s' % (h[0:8], h[8:16], h[16:24], h[24:32], h[32:40])

  def AddFile(self, path, file_handle):
    """Adds the provided file to the list to be pushed to the server.

    Args:
      path: The path the file should be uploaded as.
      file_handle: A stream containing data to upload.
    """
    assert not self.in_transaction, "Already in a transaction."
    assert file_handle is not None

    # Reject paths the server would refuse (ValidFilename returns an error
    # message string, or '' when the path is acceptable).
    reason = appinfo.ValidFilename(path)
    if reason != '':
      logging.error(reason)
      return

    # Hash the contents but leave the stream position unchanged for the
    # caller.
    pos = file_handle.tell()
    content_hash = self._Hash(file_handle.read())
    file_handle.seek(pos, 0)

    self.files[path] = content_hash

  def Begin(self):
    """Begins the transaction, returning a list of files that need uploading.

    All calls to AddFile must be made before calling Begin().

    Returns:
      A list of pathnames for files that should be uploaded using UploadFile()
      before Commit() can be called.
    """
    assert not self.in_transaction, "Already in a transaction."

    StatusUpdate("Initiating update.")
    self.server.Send("/api/appversion/create", app_id=self.app_id,
                     version=self.version, payload=self.config.ToYAML())
    self.in_transaction = True

    # Partition files: static files (those matching a static handler and thus
    # having a mime type) are cloned/uploaded as blobs, the rest as
    # application files.
    files_to_clone = []
    blobs_to_clone = []
    for path, content_hash in self.files.iteritems():
      mime_type = GetMimeTypeIfStaticFile(self.config, path)
      if mime_type is not None:
        blobs_to_clone.append((path, content_hash, mime_type))
      else:
        files_to_clone.append((path, content_hash))

    files_to_upload = {}

    def CloneFiles(url, files, file_type):
      # Ask the server to clone files it already has (matched by content
      # hash); whatever it couldn't clone comes back as the list of files
      # that must be uploaded explicitly.
      if len(files) == 0:
        return

      StatusUpdate("Cloning %d %s file%s." %
                   (len(files), file_type, len(files) != 1 and "s" or ""))
      # Send clone requests in chunks of MAX_FILES_TO_CLONE.
      for i in xrange(0, len(files), MAX_FILES_TO_CLONE):
        if i > 0 and i % MAX_FILES_TO_CLONE == 0:
          StatusUpdate("Cloned %d files." % i)

        chunk = files[i:min(len(files), i + MAX_FILES_TO_CLONE)]
        result = self.server.Send(url,
                                  app_id=self.app_id, version=self.version,
                                  payload=BuildClonePostBody(chunk))
        if result:
          files_to_upload.update(dict(
              (f, self.files[f]) for f in result.split(LIST_DELIMITER)))

    CloneFiles("/api/appversion/cloneblobs", blobs_to_clone, "static")
    CloneFiles("/api/appversion/clonefiles", files_to_clone, "application")

    logging.info('Files to upload: ' + str(files_to_upload))

    # From here on, self.files tracks only the files still owed to the
    # server; UploadFile() removes entries as they are sent.
    self.files = files_to_upload
    return sorted(files_to_upload.iterkeys())

  def UploadFile(self, path, file_handle):
    """Uploads a file to the hosting service.

    Must only be called after Begin().
    The path provided must be one of those that were returned by Begin().

    Args:
      path: The path the file is being uploaded as.
      file_handle: A file-like object containing the data to upload.

    Raises:
      KeyError: The provided file is not amongst those to be uploaded.
    """
    assert self.in_transaction, "Begin() must be called before UploadFile()."
    if path not in self.files:
      raise KeyError("File '%s' is not in the list of files to be uploaded."
                     % path)

    del self.files[path]
    # Static files go to the blob endpoint with their mime type; everything
    # else is an application file.
    mime_type = GetMimeTypeIfStaticFile(self.config, path)
    if mime_type is not None:
      self.server.Send("/api/appversion/addblob", app_id=self.app_id,
                       version=self.version, path=path, content_type=mime_type,
                       payload=file_handle.read())
    else:
      self.server.Send("/api/appversion/addfile", app_id=self.app_id,
                       version=self.version, path=path,
                       payload=file_handle.read())

  def Commit(self):
    """Commits the transaction, making the new app version available.

    All the files returned by Begin() must have been uploaded with UploadFile()
    before Commit() can be called.

    Raises:
      Exception: Some required files were not uploaded.
    """
    assert self.in_transaction, "Begin() must be called before Commit()."
    # self.files is emptied by UploadFile(); anything left was never sent.
    if self.files:
      raise Exception("Not all required files have been uploaded.")

    StatusUpdate("Closing update.")
    self.server.Send("/api/appversion/commit", app_id=self.app_id,
                     version=self.version)
    self.in_transaction = False

  def Rollback(self):
    """Rolls back the transaction if one is in progress."""
    if not self.in_transaction:
      return
    StatusUpdate("Rolling back the update.")
    self.server.Send("/api/appversion/rollback", app_id=self.app_id,
                     version=self.version)
    self.in_transaction = False
    self.files = {}

  def DoUpload(self, paths, max_size, openfunc):
    """Uploads a new appversion with the given config and files to the server.

    Args:
      paths: An iterator that yields the relative paths of the files to upload.
      max_size: The maximum size file to upload.
      openfunc: A function that takes a path and returns a file-like object.
    """
    logging.info("Reading app configuration.")

    # Phase 1: scan the local disk, registering every eligible file.
    try:
      StatusUpdate("Scanning files on local disk.")
      num_files = 0
      for path in paths:
        file_handle = openfunc(path)
        try:
          if self.config.skip_files.match(path):
            logging.info("Ignoring file '%s': File matches ignore regex.",
                         path)
          else:
            file_length = GetFileLength(file_handle)
            if file_length > max_size:
              logging.error("Ignoring file '%s': Too long "
                            "(max %d bytes, file is %d bytes)",
                            path, max_size, file_length)
            else:
              logging.info("Processing file '%s'", path)
              self.AddFile(path, file_handle)
        finally:
          file_handle.close()
        num_files += 1
        if num_files % 500 == 0:
          StatusUpdate("Scanned %d files." % num_files)
    except KeyboardInterrupt:
      logging.info("User interrupted. Aborting.")
      raise
    except EnvironmentError, e:
      logging.error("An error occurred processing file '%s': %s. Aborting.",
                    path, e)
      raise

    # Phase 2: start the transaction, upload whatever the server couldn't
    # clone, and commit; roll back on any failure.
    try:
      missing_files = self.Begin()
      if len(missing_files) > 0:
        StatusUpdate("Uploading %d files." % len(missing_files))
        num_files = 0
        for missing_file in missing_files:
          logging.info("Uploading file '%s'" % missing_file)
          file_handle = openfunc(missing_file)
          try:
            self.UploadFile(missing_file, file_handle)
          finally:
            file_handle.close()
          num_files += 1
          if num_files % 500 == 0:
            StatusUpdate("Uploaded %d files." % num_files)

      self.Commit()
    except KeyboardInterrupt:
      logging.info("User interrupted. Aborting.")
      self.Rollback()
      raise
    except:
      # Deliberately broad: any failure mid-transaction must roll back
      # before re-raising.
      logging.error("An unexpected error occurred. Aborting.")
      self.Rollback()
      raise

    logging.info("Done!")
|
1339 |
|
1340 |
|
1341 def FileIterator(base, separator=os.path.sep): |
|
1342 """Walks a directory tree, returning all the files. Follows symlinks. |
|
1343 |
|
1344 Args: |
|
1345 base: The base path to search for files under. |
|
1346 separator: Path separator used by the running system's platform. |
|
1347 |
|
1348 Yields: |
|
1349 Paths of files found, relative to base. |
|
1350 """ |
|
1351 dirs = [""] |
|
1352 while dirs: |
|
1353 current_dir = dirs.pop() |
|
1354 for entry in os.listdir(os.path.join(base, current_dir)): |
|
1355 name = os.path.join(current_dir, entry) |
|
1356 fullname = os.path.join(base, name) |
|
1357 if os.path.isfile(fullname): |
|
1358 if separator == "\\": |
|
1359 name = name.replace("\\", "/") |
|
1360 yield name |
|
1361 elif os.path.isdir(fullname): |
|
1362 dirs.append(name) |
|
1363 |
|
1364 |
|
def GetFileLength(fh):
  """Returns the length of the file represented by fh.

  This function is capable of finding the length of any seekable stream,
  unlike os.fstat, which only works on file streams.

  Args:
    fh: The stream to get the length of.

  Returns:
    The length of the stream.
  """
  # Remember where the caller left the stream so it can be restored.
  saved_position = fh.tell()
  fh.seek(0, 2)
  stream_length = fh.tell()
  fh.seek(saved_position, 0)
  return stream_length
|
1382 |
|
1383 |
|
def GetPlatformToken(os_module=os, sys_module=sys, platform=sys.platform):
  """Returns a 'User-agent' token for the host system platform.

  Args:
    os_module, sys_module, platform: Used for testing.

  Returns:
    String containing the platform token for the host system.
  """
  # Windows exposes sys.getwindowsversion(); prefer it when present.
  if hasattr(sys_module, "getwindowsversion"):
    version_fields = sys_module.getwindowsversion()[:4]
    return "%s/%s" % (platform,
                      ".".join(str(field) for field in version_fields))
  # POSIX systems expose os.uname(): report "<sysname>/<release>".
  if hasattr(os_module, "uname"):
    uname_result = os_module.uname()
    return uname_result[0] + "/" + uname_result[2]
  return "unknown"
|
1402 |
|
1403 |
|
def GetUserAgent(get_version=GetVersionObject, get_platform=GetPlatformToken):
  """Determines the value of the 'User-agent' header to use for HTTP requests.

  If the 'APPCFG_SDK_NAME' environment variable is present, that will be
  used as the first product token in the user-agent.

  Args:
    get_version, get_platform: Used for testing.

  Returns:
    String containing the 'user-agent' header value, which includes the SDK
    version, the platform information, and the version of Python;
    e.g., "appcfg_py/1.0.1 Darwin/9.2.0 Python/2.5.2".
  """
  tokens = []

  # An explicit SDK name from the environment replaces the default
  # "appcfg_py/<release>" token.
  sdk_name = os.environ.get("APPCFG_SDK_NAME")
  if sdk_name:
    tokens.append(sdk_name)
  else:
    version = get_version()
    release = "unknown" if version is None else version["release"]
    tokens.append("appcfg_py/%s" % release)

  tokens.append(get_platform())

  tokens.append("Python/%s" % ".".join(str(part) for part in sys.version_info))

  return " ".join(tokens)
|
1438 |
|
1439 |
|
1440 class AppCfgApp(object): |
|
1441 """Singleton class to wrap AppCfg tool functionality. |
|
1442 |
|
1443 This class is responsible for parsing the command line and executing |
|
1444 the desired action on behalf of the user. Processing files and |
|
1445 communicating with the server is handled by other classes. |
|
1446 |
|
1447 Attributes: |
|
1448 actions: A dictionary mapping action names to Action objects. |
|
1449 action: The Action specified on the command line. |
|
1450 parser: An instance of optparse.OptionParser. |
|
1451 options: The command line options parsed by 'parser'. |
|
1452 argv: The original command line as a list. |
|
1453 args: The positional command line args left over after parsing the options. |
|
1454 raw_input_fn: Function used for getting raw user input, like email. |
|
1455 password_input_fn: Function used for getting user password. |
|
1456 |
|
1457 Attributes for testing: |
|
1458 parser_class: The class to use for parsing the command line. Because |
|
1459 OptionsParser will exit the program when there is a parse failure, it |
|
1460 is nice to subclass OptionsParser and catch the error before exiting. |
|
1461 """ |
|
1462 |
|
  def __init__(self, argv, parser_class=optparse.OptionParser,
               rpc_server_class=HttpRpcServer,
               raw_input_fn=raw_input,
               password_input_fn=getpass.getpass):
    """Initializer. Parses the cmdline and selects the Action to use.

    Initializes all of the attributes described in the class docstring.
    Prints help or error messages if there is an error parsing the cmdline.

    Args:
      argv: The list of arguments passed to this program.
      parser_class: Options parser to use for this application.
      rpc_server_class: RPC server class to use for this application.
      raw_input_fn: Function used for getting user email.
      password_input_fn: Function used for getting user password.
    """
    self.parser_class = parser_class
    self.argv = argv
    self.rpc_server_class = rpc_server_class
    self.raw_input_fn = raw_input_fn
    self.password_input_fn = password_input_fn

    # First pass: a generic parser that knows every action's options, used
    # only to identify which action was requested.
    self.parser = self._GetOptionParser()
    for action in self.actions.itervalues():
      action.options(self, self.parser)

    self.options, self.args = self.parser.parse_args(argv[1:])

    if len(self.args) < 1:
      self._PrintHelpAndExit()
    if self.args[0] not in self.actions:
      self.parser.error("Unknown action '%s'\n%s" %
                        (self.args[0], self.parser.get_description()))
    action_name = self.args.pop(0)
    self.action = self.actions[action_name]

    # Second pass: re-parse with a parser specialized to the chosen action,
    # replacing the generic parser and options.
    self.parser, self.options = self._MakeSpecificParser(self.action)

    if self.options.help:
      self._PrintHelpAndExit()

    if self.options.verbose == 2:
      logging.getLogger().setLevel(logging.INFO)
    elif self.options.verbose == 3:
      logging.getLogger().setLevel(logging.DEBUG)

    # Publish the verbosity level module-wide (read elsewhere in this file).
    global verbosity
    verbosity = self.options.verbose
|
1511 |
|
1512 def Run(self, error_fh=sys.stderr): |
|
1513 """Executes the requested action. |
|
1514 |
|
1515 Catches any HTTPErrors raised by the action and prints them to stderr. |
|
1516 |
|
1517 Args: |
|
1518 error_fh: Print any HTTPErrors to this file handle. |
|
1519 """ |
|
1520 try: |
|
1521 self.action.function(self) |
|
1522 except urllib2.HTTPError, e: |
|
1523 body = e.read() |
|
1524 print >>error_fh, ("Error %d: --- begin server output ---\n" |
|
1525 "%s\n--- end server output ---" % |
|
1526 (e.code, body.rstrip("\n"))) |
|
1527 except yaml_errors.EventListenerError, e: |
|
1528 print >>error_fh, ("Error parsing yaml file:\n%s" % e) |
|
1529 |
|
1530 def _GetActionDescriptions(self): |
|
1531 """Returns a formatted string containing the short_descs for all actions.""" |
|
1532 action_names = self.actions.keys() |
|
1533 action_names.sort() |
|
1534 desc = "" |
|
1535 for action_name in action_names: |
|
1536 desc += " %s: %s\n" % (action_name, self.actions[action_name].short_desc) |
|
1537 return desc |
|
1538 |
|
  def _GetOptionParser(self):
    """Creates an OptionParser with generic usage and description strings.

    Returns:
      An OptionParser instance.
    """

    class Formatter(optparse.IndentedHelpFormatter):
      """Custom help formatter that does not reformat the description."""
      def format_description(self, description):
        return description + "\n"

    desc = self._GetActionDescriptions()
    desc = ("Action must be one of:\n%s"
            "Use 'help <action>' for a detailed description.") % desc

    # "resolve" lets action-specific options later override these generic
    # ones without raising a conflict error.
    parser = self.parser_class(usage="%prog [options] <action>",
                               description=desc,
                               formatter=Formatter(),
                               conflict_handler="resolve")
    parser.add_option("-h", "--help", action="store_true",
                      dest="help", help="Show the help message and exit.")
    # Verbosity levels: 0 = quiet, 1 = default, 2 = info, 3 = debug.
    parser.add_option("-q", "--quiet", action="store_const", const=0,
                      dest="verbose", help="Print errors only.")
    parser.add_option("-v", "--verbose", action="store_const", const=2,
                      dest="verbose", default=1,
                      help="Print info level logs.")
    parser.add_option("--noisy", action="store_const", const=3,
                      dest="verbose", help="Print all logs.")
    parser.add_option("-s", "--server", action="store", dest="server",
                      default="appengine.google.com",
                      metavar="SERVER", help="The server to upload to.")
    parser.add_option("-e", "--email", action="store", dest="email",
                      metavar="EMAIL", default=None,
                      help="The username to use. Will prompt if omitted.")
    parser.add_option("-H", "--host", action="store", dest="host",
                      metavar="HOST", default=None,
                      help="Overrides the Host header sent with all RPCs.")
    parser.add_option("--no_cookies", action="store_false",
                      dest="save_cookies", default=True,
                      help="Do not save authentication cookies to local disk.")
    parser.add_option("--passin", action="store_true",
                      dest="passin", default=False,
                      help="Read the login password from stdin.")
    return parser
|
1584 |
|
1585 def _MakeSpecificParser(self, action): |
|
1586 """Creates a new parser with documentation specific to 'action'. |
|
1587 |
|
1588 Args: |
|
1589 action: An Action instance to be used when initializing the new parser. |
|
1590 |
|
1591 Returns: |
|
1592 A tuple containing: |
|
1593 parser: An instance of OptionsParser customized to 'action'. |
|
1594 options: The command line options after re-parsing. |
|
1595 """ |
|
1596 parser = self._GetOptionParser() |
|
1597 parser.set_usage(action.usage) |
|
1598 parser.set_description("%s\n%s" % (action.short_desc, action.long_desc)) |
|
1599 action.options(self, parser) |
|
1600 options, args = parser.parse_args(self.argv[1:]) |
|
1601 return parser, options |
|
1602 |
|
1603 def _PrintHelpAndExit(self, exit_code=2): |
|
1604 """Prints the parser's help message and exits the program. |
|
1605 |
|
1606 Args: |
|
1607 exit_code: The integer code to pass to sys.exit(). |
|
1608 """ |
|
1609 self.parser.print_help() |
|
1610 sys.exit(exit_code) |
|
1611 |
|
  def _GetRpcServer(self):
    """Returns an instance of an AbstractRpcServer.

    Returns:
      A new AbstractRpcServer, on which RPC calls can be made.
    """

    def GetUserCredentials():
      """Prompts the user for a username and password."""
      email = self.options.email
      if email is None:
        email = self.raw_input_fn("Email: ")

      password_prompt = "Password for %s: " % email
      # --passin reads the password from stdin (scriptable); otherwise use
      # the no-echo prompt.
      if self.options.passin:
        password = self.raw_input_fn(password_prompt)
      else:
        password = self.password_input_fn(password_prompt)

      return (email, password)

    # Debug path: when talking to a local dev_appserver, skip real
    # authentication and pre-seat a fake login cookie.
    if self.options.host and self.options.host == "localhost":
      email = self.options.email
      if email is None:
        email = "test@example.com"
      logging.info("Using debug user %s. Override with --email" % email)
      server = self.rpc_server_class(
          self.options.server,
          lambda: (email, "password"),
          host_override=self.options.host,
          extra_headers={"Cookie": 'dev_appserver_login="%s:False"' % email},
          save_cookies=self.options.save_cookies)
      # The fake dev-appserver cookie counts as already authenticated.
      server.authenticated = True
      return server

    return self.rpc_server_class(self.options.server, GetUserCredentials,
                                 host_override=self.options.host,
                                 save_cookies=self.options.save_cookies)
|
1650 |
|
1651 def _FindYaml(self, basepath, file_name): |
|
1652 """Find yaml files in application directory. |
|
1653 |
|
1654 Args: |
|
1655 basepath: Base application directory. |
|
1656 file_name: Filename without extension to search for. |
|
1657 |
|
1658 Returns: |
|
1659 Path to located yaml file if one exists, else None. |
|
1660 """ |
|
1661 if not os.path.isdir(basepath): |
|
1662 self.parser.error("Not a directory: %s" % basepath) |
|
1663 |
|
1664 for yaml_file in (file_name + '.yaml', file_name + '.yml'): |
|
1665 yaml_path = os.path.join(basepath, yaml_file) |
|
1666 if os.path.isfile(yaml_path): |
|
1667 return yaml_path |
|
1668 |
|
1669 return None |
|
1670 |
|
1671 def _ParseAppYaml(self, basepath): |
|
1672 """Parses the app.yaml file. |
|
1673 |
|
1674 Returns: |
|
1675 An AppInfoExternal object. |
|
1676 """ |
|
1677 appyaml_filename = self._FindYaml(basepath, "app") |
|
1678 if appyaml_filename is None: |
|
1679 self.parser.error("Directory does not contain an app.yaml " |
|
1680 "configuration file.") |
|
1681 |
|
1682 fh = open(appyaml_filename, "r") |
|
1683 try: |
|
1684 appyaml = appinfo.LoadSingleAppInfo(fh) |
|
1685 finally: |
|
1686 fh.close() |
|
1687 return appyaml |
|
1688 |
|
1689 def _ParseIndexYaml(self, basepath): |
|
1690 """Parses the index.yaml file. |
|
1691 |
|
1692 Returns: |
|
1693 A single parsed yaml file or None if the file does not exist. |
|
1694 """ |
|
1695 file_name = self._FindYaml(basepath, "index") |
|
1696 if file_name is not None: |
|
1697 fh = open(file_name, "r") |
|
1698 try: |
|
1699 index_defs = datastore_index.ParseIndexDefinitions(fh) |
|
1700 finally: |
|
1701 fh.close() |
|
1702 return index_defs |
|
1703 return None |
|
1704 |
|
1705 def Help(self): |
|
1706 """Prints help for a specific action. |
|
1707 |
|
1708 Expects self.args[0] to contain the name of the action in question. |
|
1709 Exits the program after printing the help message. |
|
1710 """ |
|
1711 if len(self.args) != 1 or self.args[0] not in self.actions: |
|
1712 self.parser.error("Expected a single action argument. Must be one of:\n" + |
|
1713 self._GetActionDescriptions()) |
|
1714 |
|
1715 action = self.actions[self.args[0]] |
|
1716 self.parser, options = self._MakeSpecificParser(action) |
|
1717 self._PrintHelpAndExit(exit_code=0) |
|
1718 |
|
1719 def Update(self): |
|
1720 """Updates and deploys a new appversion.""" |
|
1721 if len(self.args) != 1: |
|
1722 self.parser.error("Expected a single <directory> argument.") |
|
1723 |
|
1724 basepath = self.args[0] |
|
1725 appyaml = self._ParseAppYaml(basepath) |
|
1726 rpc_server = self._GetRpcServer() |
|
1727 |
|
1728 updatecheck = UpdateCheck(rpc_server, appyaml) |
|
1729 updatecheck.CheckForUpdates() |
|
1730 |
|
1731 appversion = AppVersionUpload(rpc_server, appyaml) |
|
1732 appversion.DoUpload(FileIterator(basepath), self.options.max_size, |
|
1733 lambda path: open(os.path.join(basepath, path), "rb")) |
|
1734 |
|
1735 index_defs = self._ParseIndexYaml(basepath) |
|
1736 if index_defs: |
|
1737 index_upload = IndexDefinitionUpload(rpc_server, appyaml, index_defs) |
|
1738 index_upload.DoUpload() |
|
1739 |
|
1740 def _UpdateOptions(self, parser): |
|
1741 """Adds update-specific options to 'parser'. |
|
1742 |
|
1743 Args: |
|
1744 parser: An instance of OptionsParser. |
|
1745 """ |
|
1746 parser.add_option("-S", "--max_size", type="int", dest="max_size", |
|
1747 default=1048576, metavar="SIZE", |
|
1748 help="Maximum size of a file to upload.") |
|
1749 |
|
1750 def VacuumIndexes(self): |
|
1751 """Deletes unused indexes.""" |
|
1752 if len(self.args) != 1: |
|
1753 self.parser.error("Expected a single <directory> argument.") |
|
1754 |
|
1755 basepath = self.args[0] |
|
1756 config = self._ParseAppYaml(basepath) |
|
1757 |
|
1758 index_defs = self._ParseIndexYaml(basepath) |
|
1759 if index_defs is None: |
|
1760 index_defs = datastore_index.IndexDefinitions() |
|
1761 |
|
1762 rpc_server = self._GetRpcServer() |
|
1763 vacuum = VacuumIndexesOperation(rpc_server, |
|
1764 config, |
|
1765 self.options.force_delete) |
|
1766 vacuum.DoVacuum(index_defs) |
|
1767 |
|
1768 def _VacuumIndexesOptions(self, parser): |
|
1769 """Adds vacuum_indexes-specific options to 'parser'. |
|
1770 |
|
1771 Args: |
|
1772 parser: An instance of OptionsParser. |
|
1773 """ |
|
1774 parser.add_option("-f", "--force", action="store_true", dest="force_delete", |
|
1775 default=False, |
|
1776 help="Force deletion without being prompted.") |
|
1777 |
|
1778 def UpdateIndexes(self): |
|
1779 """Updates indexes.""" |
|
1780 if len(self.args) != 1: |
|
1781 self.parser.error("Expected a single <directory> argument.") |
|
1782 |
|
1783 basepath = self.args[0] |
|
1784 appyaml = self._ParseAppYaml(basepath) |
|
1785 rpc_server = self._GetRpcServer() |
|
1786 |
|
1787 index_defs = self._ParseIndexYaml(basepath) |
|
1788 if index_defs: |
|
1789 index_upload = IndexDefinitionUpload(rpc_server, appyaml, index_defs) |
|
1790 index_upload.DoUpload() |
|
1791 |
|
1792 def Rollback(self): |
|
1793 """Does a rollback of any existing transaction for this app version.""" |
|
1794 if len(self.args) != 1: |
|
1795 self.parser.error("Expected a single <directory> argument.") |
|
1796 |
|
1797 basepath = self.args[0] |
|
1798 appyaml = self._ParseAppYaml(basepath) |
|
1799 |
|
1800 appversion = AppVersionUpload(self._GetRpcServer(), appyaml) |
|
1801 appversion.in_transaction = True |
|
1802 appversion.Rollback() |
|
1803 |
|
1804 def RequestLogs(self): |
|
1805 """Write request logs to a file.""" |
|
1806 if len(self.args) != 2: |
|
1807 self.parser.error( |
|
1808 "Expected a <directory> argument and an <output_file> argument.") |
|
1809 if (self.options.severity is not None and |
|
1810 not 0 <= self.options.severity <= MAX_LOG_LEVEL): |
|
1811 self.parser.error( |
|
1812 "Severity range is 0 (DEBUG) through %s (CRITICAL)." % MAX_LOG_LEVEL) |
|
1813 |
|
1814 if self.options.num_days is None: |
|
1815 self.options.num_days = int(not self.options.append) |
|
1816 basepath = self.args[0] |
|
1817 appyaml = self._ParseAppYaml(basepath) |
|
1818 rpc_server = self._GetRpcServer() |
|
1819 logs_requester = LogsRequester(rpc_server, appyaml, self.args[1], |
|
1820 self.options.num_days, |
|
1821 self.options.append, |
|
1822 self.options.severity, |
|
1823 time.time()) |
|
1824 logs_requester.DownloadLogs() |
|
1825 |
|
  def _RequestLogsOptions(self, parser):
    """Adds request_logs-specific options to 'parser'.

    Args:
      parser: An instance of OptionsParser.
    """
    parser.add_option("-n", "--num_days", type="int", dest="num_days",
                      action="store", default=None,
                      help="Number of days worth of log data to get. "
                      "The cut-off point is midnight UTC. "
                      "Use 0 to get all available logs. "
                      "Default is 1, unless --append is also given; "
                      "then the default is 0.")
    parser.add_option("-a", "--append", dest="append",
                      action="store_true", default=False,
                      help="Append to existing file.")
    parser.add_option("--severity", type="int", dest="severity",
                      action="store", default=None,
                      help="Severity of app-level log messages to get. "
                      "The range is 0 (DEBUG) through 4 (CRITICAL). "
                      "If omitted, only request logs are returned.")
|
1847 |
|
1848 class Action(object): |
|
1849 """Contains information about a command line action. |
|
1850 |
|
1851 Attributes: |
|
1852 function: An AppCfgApp function that will perform the appropriate |
|
1853 action. |
|
1854 usage: A command line usage string. |
|
1855 short_desc: A one-line description of the action. |
|
1856 long_desc: A detailed description of the action. Whitespace and |
|
1857 formatting will be preserved. |
|
1858 options: A function that will add extra options to a given OptionParser |
|
1859 object. |
|
1860 """ |
|
1861 |
|
1862 def __init__(self, function, usage, short_desc, long_desc="", |
|
1863 options=lambda obj, parser: None): |
|
1864 """Initializer for the class attributes.""" |
|
1865 self.function = function |
|
1866 self.usage = usage |
|
1867 self.short_desc = short_desc |
|
1868 self.long_desc = long_desc |
|
1869 self.options = options |
|
1870 |
|
  # Table of supported command-line actions.  Each key is the action name as
  # typed by the user; each value is an Action describing how to dispatch it
  # and which extra options its parser accepts.
  actions = {

      "help": Action(
          function=Help,
          usage="%prog help <action>",
          short_desc="Print help for a specific action."),

      "update": Action(
          function=Update,
          usage="%prog [options] update <directory>",
          options=_UpdateOptions,
          short_desc="Create or update an app version.",
          long_desc="""
Specify a directory that contains all of the files required by
the app, and appcfg.py will create/update the app version referenced
in the app.yaml file at the top level of that directory. appcfg.py
will follow symlinks and recursively upload all files to the server.
Temporary or source control files (e.g. foo~, .svn/*) will be skipped."""),

      "update_indexes": Action(
          function=UpdateIndexes,
          usage="%prog [options] update_indexes <directory>",
          short_desc="Update application indexes.",
          long_desc="""
The 'update_indexes' command will add additional indexes which are not currently
in production as well as restart any indexes that were not completed."""),

      "vacuum_indexes": Action(
          function=VacuumIndexes,
          usage="%prog [options] vacuum_indexes <directory>",
          options=_VacuumIndexesOptions,
          short_desc="Delete unused indexes from application.",
          long_desc="""
The 'vacuum_indexes' command will help clean up indexes which are no longer
in use. It does this by comparing the local index configuration with
indexes that are actually defined on the server. If any indexes on the
server do not exist in the index configuration file, the user is given the
option to delete them."""),

      "rollback": Action(
          function=Rollback,
          usage="%prog [options] rollback <directory>",
          short_desc="Rollback an in-progress update.",
          long_desc="""
The 'update' command requires a server-side transaction. Use 'rollback'
if you get an error message about another transaction being in progress
and you are sure that there is no such transaction."""),

      "request_logs": Action(
          function=RequestLogs,
          usage="%prog [options] request_logs <directory> <output_file>",
          options=_RequestLogsOptions,
          short_desc="Write request logs in Apache common log format.",
          long_desc="""
The 'request_logs' command exports the request logs from your application
to a file. It will write Apache common log format records ordered
chronologically. If output file is '-' stdout will be written."""),

  }
|
1930 |
|
1931 |
|
def main(argv):
  """Configures logging and runs the AppCfgApp tool with 'argv'."""
  log_format = ("%(asctime)s %(levelname)s %(filename)s:"
                "%(lineno)s %(message)s ")
  logging.basicConfig(format=log_format)
  try:
    app = AppCfgApp(argv)
    app.Run()
  except KeyboardInterrupt:
    # Ctrl-C is a normal way to abort; report it and exit non-zero.
    StatusUpdate("Interrupted.")
    sys.exit(1)
|
1940 |
|
1941 |
|
if __name__ == "__main__":
  # Pass the full argv (including the program name) through to main().
  main(sys.argv)