changeset 828 | f5fd65cc3bf3 |
parent 686 | df109be0567c |
child 1278 | a7766286a7be |
827:88c186556a80 | 828:f5fd65cc3bf3 |
---|---|
27 methods to add to the list of files, fetch a list of modified files, upload |
27 methods to add to the list of files, fetch a list of modified files, upload |
28 files, and commit or rollback the transaction. |
28 files, and commit or rollback the transaction. |
29 """ |
29 """ |
30 |
30 |
31 |
31 |
32 import cookielib |
32 import calendar |
33 import datetime |
33 import datetime |
34 import getpass |
34 import getpass |
35 import logging |
35 import logging |
36 import mimetypes |
36 import mimetypes |
37 import optparse |
37 import optparse |
38 import os |
38 import os |
39 import re |
39 import re |
40 import sha |
40 import sha |
41 import socket |
|
42 import sys |
41 import sys |
43 import tempfile |
42 import tempfile |
44 import time |
43 import time |
45 import urllib |
|
46 import urllib2 |
44 import urllib2 |
47 |
45 |
48 import google |
46 import google |
47 import yaml |
|
48 from google.appengine.cron import groctimespecification |
|
49 from google.appengine.api import appinfo |
49 from google.appengine.api import appinfo |
50 from google.appengine.api import croninfo |
|
50 from google.appengine.api import validation |
51 from google.appengine.api import validation |
51 from google.appengine.api import yaml_errors |
52 from google.appengine.api import yaml_errors |
52 from google.appengine.api import yaml_object |
53 from google.appengine.api import yaml_object |
53 from google.appengine.datastore import datastore_index |
54 from google.appengine.datastore import datastore_index |
54 import yaml |
55 from google.appengine.tools import appengine_rpc |
55 |
56 |
56 |
57 |
57 MAX_FILES_TO_CLONE = 100 |
58 MAX_FILES_TO_CLONE = 100 |
58 LIST_DELIMITER = "\n" |
59 LIST_DELIMITER = "\n" |
59 TUPLE_DELIMITER = "|" |
60 TUPLE_DELIMITER = "|" |
65 NAG_FILE = ".appcfg_nag" |
66 NAG_FILE = ".appcfg_nag" |
66 |
67 |
67 MAX_LOG_LEVEL = 4 |
68 MAX_LOG_LEVEL = 4 |
68 |
69 |
69 verbosity = 1 |
70 verbosity = 1 |
71 |
|
72 |
|
73 appinfo.AppInfoExternal.ATTRIBUTES[appinfo.RUNTIME] = "python" |
|
70 |
74 |
71 |
75 |
def StatusUpdate(msg):
  """Print a status message to stderr.

  Output is suppressed entirely when the module-level `verbosity` is
  zero or negative (e.g. when the user passed a --quiet style flag).

  Args:
    msg: The string to print.
  """
  if verbosity > 0:
    sys.stderr.write("%s\n" % msg)
82 |
|
83 |
|
84 class ClientLoginError(urllib2.HTTPError): |
|
85 """Raised to indicate there was an error authenticating with ClientLogin.""" |
|
86 |
|
87 def __init__(self, url, code, msg, headers, args): |
|
88 urllib2.HTTPError.__init__(self, url, code, msg, headers, None) |
|
89 self.args = args |
|
90 self.reason = args["Error"] |
|
91 |
|
92 |
|
93 class AbstractRpcServer(object): |
|
94 """Provides a common interface for a simple RPC server.""" |
|
95 |
|
96 def __init__(self, host, auth_function, host_override=None, |
|
97 extra_headers=None, save_cookies=False): |
|
98 """Creates a new HttpRpcServer. |
|
99 |
|
100 Args: |
|
101 host: The host to send requests to. |
|
102 auth_function: A function that takes no arguments and returns an |
|
103 (email, password) tuple when called. Will be called if authentication |
|
104 is required. |
|
105 host_override: The host header to send to the server (defaults to host). |
|
106 extra_headers: A dict of extra headers to append to every request. Values |
|
107 supplied here will override other default headers that are supplied. |
|
108 save_cookies: If True, save the authentication cookies to local disk. |
|
109 If False, use an in-memory cookiejar instead. Subclasses must |
|
110 implement this functionality. Defaults to False. |
|
111 """ |
|
112 self.host = host |
|
113 self.host_override = host_override |
|
114 self.auth_function = auth_function |
|
115 self.authenticated = False |
|
116 |
|
117 self.extra_headers = { |
|
118 "User-agent": GetUserAgent() |
|
119 } |
|
120 if extra_headers: |
|
121 self.extra_headers.update(extra_headers) |
|
122 |
|
123 self.save_cookies = save_cookies |
|
124 self.cookie_jar = cookielib.MozillaCookieJar() |
|
125 self.opener = self._GetOpener() |
|
126 if self.host_override: |
|
127 logging.info("Server: %s; Host: %s", self.host, self.host_override) |
|
128 else: |
|
129 logging.info("Server: %s", self.host) |
|
130 |
|
131 def _GetOpener(self): |
|
132 """Returns an OpenerDirector for making HTTP requests. |
|
133 |
|
134 Returns: |
|
135 A urllib2.OpenerDirector object. |
|
136 """ |
|
137 raise NotImplemented() |
|
138 |
|
139 def _CreateRequest(self, url, data=None): |
|
140 """Creates a new urllib request.""" |
|
141 logging.debug("Creating request for: '%s' with payload:\n%s", url, data) |
|
142 req = urllib2.Request(url, data=data) |
|
143 if self.host_override: |
|
144 req.add_header("Host", self.host_override) |
|
145 for key, value in self.extra_headers.iteritems(): |
|
146 req.add_header(key, value) |
|
147 return req |
|
148 |
|
149 def _GetAuthToken(self, email, password): |
|
150 """Uses ClientLogin to authenticate the user, returning an auth token. |
|
151 |
|
152 Args: |
|
153 email: The user's email address |
|
154 password: The user's password |
|
155 |
|
156 Raises: |
|
157 ClientLoginError: If there was an error authenticating with ClientLogin. |
|
158 HTTPError: If there was some other form of HTTP error. |
|
159 |
|
160 Returns: |
|
161 The authentication token returned by ClientLogin. |
|
162 """ |
|
163 req = self._CreateRequest( |
|
164 url="https://www.google.com/accounts/ClientLogin", |
|
165 data=urllib.urlencode({ |
|
166 "Email": email, |
|
167 "Passwd": password, |
|
168 "service": "ah", |
|
169 "source": "Google-appcfg-1.0", |
|
170 "accountType": "HOSTED_OR_GOOGLE" |
|
171 }) |
|
172 ) |
|
173 try: |
|
174 response = self.opener.open(req) |
|
175 response_body = response.read() |
|
176 response_dict = dict(x.split("=") |
|
177 for x in response_body.split("\n") if x) |
|
178 return response_dict["Auth"] |
|
179 except urllib2.HTTPError, e: |
|
180 if e.code == 403: |
|
181 body = e.read() |
|
182 response_dict = dict(x.split("=", 1) for x in body.split("\n") if x) |
|
183 raise ClientLoginError(req.get_full_url(), e.code, e.msg, |
|
184 e.headers, response_dict) |
|
185 else: |
|
186 raise |
|
187 |
|
188 def _GetAuthCookie(self, auth_token): |
|
189 """Fetches authentication cookies for an authentication token. |
|
190 |
|
191 Args: |
|
192 auth_token: The authentication token returned by ClientLogin. |
|
193 |
|
194 Raises: |
|
195 HTTPError: If there was an error fetching the authentication cookies. |
|
196 """ |
|
197 continue_location = "http://localhost/" |
|
198 args = {"continue": continue_location, "auth": auth_token} |
|
199 login_path = os.environ.get("APPCFG_LOGIN_PATH", "/_ah") |
|
200 req = self._CreateRequest("http://%s%s/login?%s" % |
|
201 (self.host, login_path, urllib.urlencode(args))) |
|
202 try: |
|
203 response = self.opener.open(req) |
|
204 except urllib2.HTTPError, e: |
|
205 response = e |
|
206 if (response.code != 302 or |
|
207 response.info()["location"] != continue_location): |
|
208 raise urllib2.HTTPError(req.get_full_url(), response.code, response.msg, |
|
209 response.headers, response.fp) |
|
210 self.authenticated = True |
|
211 |
|
212 def _Authenticate(self): |
|
213 """Authenticates the user. |
|
214 |
|
215 The authentication process works as follows: |
|
216 1) We get a username and password from the user |
|
217 2) We use ClientLogin to obtain an AUTH token for the user |
|
218 (see http://code.google.com/apis/accounts/AuthForInstalledApps.html). |
|
219 3) We pass the auth token to /_ah/login on the server to obtain an |
|
220 authentication cookie. If login was successful, it tries to redirect |
|
221 us to the URL we provided. |
|
222 |
|
223 If we attempt to access the upload API without first obtaining an |
|
224 authentication cookie, it returns a 401 response and directs us to |
|
225 authenticate ourselves with ClientLogin. |
|
226 """ |
|
227 for i in range(3): |
|
228 credentials = self.auth_function() |
|
229 try: |
|
230 auth_token = self._GetAuthToken(credentials[0], credentials[1]) |
|
231 except ClientLoginError, e: |
|
232 if e.reason == "BadAuthentication": |
|
233 print >>sys.stderr, "Invalid username or password." |
|
234 continue |
|
235 if e.reason == "CaptchaRequired": |
|
236 print >>sys.stderr, ( |
|
237 "Please go to\n" |
|
238 "https://www.google.com/accounts/DisplayUnlockCaptcha\n" |
|
239 "and verify you are a human. Then try again.") |
|
240 break; |
|
241 if e.reason == "NotVerified": |
|
242 print >>sys.stderr, "Account not verified." |
|
243 break |
|
244 if e.reason == "TermsNotAgreed": |
|
245 print >>sys.stderr, "User has not agreed to TOS." |
|
246 break |
|
247 if e.reason == "AccountDeleted": |
|
248 print >>sys.stderr, "The user account has been deleted." |
|
249 break |
|
250 if e.reason == "AccountDisabled": |
|
251 print >>sys.stderr, "The user account has been disabled." |
|
252 break |
|
253 if e.reason == "ServiceDisabled": |
|
254 print >>sys.stderr, ("The user's access to the service has been " |
|
255 "disabled.") |
|
256 break |
|
257 if e.reason == "ServiceUnavailable": |
|
258 print >>sys.stderr, "The service is not available; try again later." |
|
259 break |
|
260 raise |
|
261 self._GetAuthCookie(auth_token) |
|
262 return |
|
263 |
|
264 def Send(self, request_path, payload="", |
|
265 content_type="application/octet-stream", |
|
266 timeout=None, |
|
267 **kwargs): |
|
268 """Sends an RPC and returns the response. |
|
269 |
|
270 Args: |
|
271 request_path: The path to send the request to, eg /api/appversion/create. |
|
272 payload: The body of the request, or None to send an empty request. |
|
273 content_type: The Content-Type header to use. |
|
274 timeout: timeout in seconds; default None i.e. no timeout. |
|
275 (Note: for large requests on OS X, the timeout doesn't work right.) |
|
276 kwargs: Any keyword arguments are converted into query string parameters. |
|
277 |
|
278 Returns: |
|
279 The response body, as a string. |
|
280 """ |
|
281 if not self.authenticated: |
|
282 self._Authenticate() |
|
283 |
|
284 old_timeout = socket.getdefaulttimeout() |
|
285 socket.setdefaulttimeout(timeout) |
|
286 try: |
|
287 tries = 0 |
|
288 while True: |
|
289 tries += 1 |
|
290 args = dict(kwargs) |
|
291 url = "http://%s%s?%s" % (self.host, request_path, |
|
292 urllib.urlencode(args)) |
|
293 req = self._CreateRequest(url=url, data=payload) |
|
294 req.add_header("Content-Type", content_type) |
|
295 req.add_header("X-appcfg-api-version", "1") |
|
296 try: |
|
297 f = self.opener.open(req) |
|
298 response = f.read() |
|
299 f.close() |
|
300 return response |
|
301 except urllib2.HTTPError, e: |
|
302 if tries > 3: |
|
303 raise |
|
304 elif e.code == 401: |
|
305 self._Authenticate() |
|
306 elif e.code >= 500 and e.code < 600: |
|
307 continue |
|
308 else: |
|
309 raise |
|
310 finally: |
|
311 socket.setdefaulttimeout(old_timeout) |
|
312 |
|
313 |
|
class HttpRpcServer(AbstractRpcServer):
  """Provides a simplified RPC-style interface for HTTP requests."""

  # Cookie jar is persisted in the user's home directory when
  # save_cookies is enabled.
  DEFAULT_COOKIE_FILE_PATH = "~/.appcfg_cookies"

  def _Authenticate(self):
    """Save the cookie jar after authentication.

    Delegates the actual authentication to the base class, then persists
    the resulting cookies to disk (only if a usable cookie file was set
    up in _GetOpener and the user asked for saved cookies).
    """
    super(HttpRpcServer, self)._Authenticate()
    if self.cookie_jar.filename is not None and self.save_cookies:
      StatusUpdate("Saving authentication cookies to %s" %
                   self.cookie_jar.filename)
      self.cookie_jar.save()

  def _GetOpener(self):
    """Returns an OpenerDirector that supports cookies and ignores redirects.

    Note: no HTTPRedirectHandler is installed, so redirect responses
    (e.g. the 302 from a successful login) surface to the caller instead
    of being followed automatically.

    Returns:
      A urllib2.OpenerDirector object.
    """
    opener = urllib2.OpenerDirector()
    opener.add_handler(urllib2.ProxyHandler())
    opener.add_handler(urllib2.UnknownHandler())
    opener.add_handler(urllib2.HTTPHandler())
    opener.add_handler(urllib2.HTTPDefaultErrorHandler())
    opener.add_handler(urllib2.HTTPSHandler())
    opener.add_handler(urllib2.HTTPErrorProcessor())

    if self.save_cookies:
      self.cookie_jar.filename = os.path.expanduser(HttpRpcServer.DEFAULT_COOKIE_FILE_PATH)

      if os.path.exists(self.cookie_jar.filename):
        try:
          # Reuse an existing session: loading saved cookies lets us skip
          # the interactive authentication step entirely.
          self.cookie_jar.load()
          self.authenticated = True
          StatusUpdate("Loaded authentication cookies from %s" %
                       self.cookie_jar.filename)
        except (OSError, IOError, cookielib.LoadError), e:
          # Corrupt/unreadable cookie file: fall back to in-memory cookies
          # by clearing the filename (also disables saving later).
          logging.debug("Could not load authentication cookies; %s: %s",
                        e.__class__.__name__, e)
          self.cookie_jar.filename = None
      else:
        try:
          # Pre-create the cookie file with owner-only permissions (0600)
          # so the saved credentials are not world-readable.
          fd = os.open(self.cookie_jar.filename, os.O_CREAT, 0600)
          os.close(fd)
        except (OSError, IOError), e:
          # Can't create the file (e.g. unwritable home dir): degrade to
          # in-memory cookies rather than failing.
          logging.debug("Could not create authentication cookies file; %s: %s",
                        e.__class__.__name__, e)
          self.cookie_jar.filename = None

    opener.add_handler(urllib2.HTTPCookieProcessor(self.cookie_jar))
    return opener
|
365 |
86 |
366 |
87 |
367 def GetMimeTypeIfStaticFile(config, filename): |
88 def GetMimeTypeIfStaticFile(config, filename): |
368 """Looks up the mime type for 'filename'. |
89 """Looks up the mime type for 'filename'. |
369 |
90 |
425 opt_in: True if the user wants to check for updates on dev_appserver |
146 opt_in: True if the user wants to check for updates on dev_appserver |
426 start. False if not. May be None if we have not asked the user yet. |
147 start. False if not. May be None if we have not asked the user yet. |
427 """ |
148 """ |
428 |
149 |
429 ATTRIBUTES = { |
150 ATTRIBUTES = { |
430 "timestamp": validation.TYPE_FLOAT, |
151 "timestamp": validation.TYPE_FLOAT, |
431 "opt_in": validation.Optional(validation.TYPE_BOOL), |
152 "opt_in": validation.Optional(validation.TYPE_BOOL), |
432 } |
153 } |
433 |
154 |
434 @staticmethod |
155 @staticmethod |
435 def Load(nag_file): |
156 def Load(nag_file): |
436 """Load a single NagFile object where one and only one is expected. |
157 """Load a single NagFile object where one and only one is expected. |
446 |
167 |
447 def GetVersionObject(isfile=os.path.isfile, open_fn=open): |
168 def GetVersionObject(isfile=os.path.isfile, open_fn=open): |
448 """Gets the version of the SDK by parsing the VERSION file. |
169 """Gets the version of the SDK by parsing the VERSION file. |
449 |
170 |
450 Args: |
171 Args: |
451 isfile, open_fn: Used for testing. |
172 isfile: used for testing. |
173 open_fn: Used for testing. |
|
452 |
174 |
453 Returns: |
175 Returns: |
454 A Yaml object or None if the VERSION file does not exist. |
176 A Yaml object or None if the VERSION file does not exist. |
455 """ |
177 """ |
456 version_filename = os.path.join(os.path.dirname(google.__file__), |
178 version_filename = os.path.join(os.path.dirname(google.__file__), |
495 |
217 |
496 Args: |
218 Args: |
497 server: The AbstractRpcServer to use. |
219 server: The AbstractRpcServer to use. |
498 config: The yaml object that specifies the configuration of this |
220 config: The yaml object that specifies the configuration of this |
499 application. |
221 application. |
500 |
222 isdir: Replacement for os.path.isdir (for testing). |
501 Args for testing: |
223 isfile: Replacement for os.path.isfile (for testing). |
502 isdir: Replacement for os.path.isdir. |
224 open_fn: Replacement for the open builtin (for testing). |
503 isfile: Replacement for os.path.isfile. |
|
504 open: Replacement for the open builtin. |
|
505 """ |
225 """ |
506 self.server = server |
226 self.server = server |
507 self.config = config |
227 self.config = config |
508 self.isdir = isdir |
228 self.isdir = isdir |
509 self.isfile = isfile |
229 self.isfile = isfile |
512 @staticmethod |
232 @staticmethod |
513 def MakeNagFilename(): |
233 def MakeNagFilename(): |
514 """Returns the filename for the nag file for this user.""" |
234 """Returns the filename for the nag file for this user.""" |
515 user_homedir = os.path.expanduser("~/") |
235 user_homedir = os.path.expanduser("~/") |
516 if not os.path.isdir(user_homedir): |
236 if not os.path.isdir(user_homedir): |
517 drive, tail = os.path.splitdrive(os.__file__) |
237 drive, unused_tail = os.path.splitdrive(os.__file__) |
518 if drive: |
238 if drive: |
519 os.environ["HOMEDRIVE"] = drive |
239 os.environ["HOMEDRIVE"] = drive |
520 |
240 |
521 return os.path.expanduser("~/" + NAG_FILE) |
241 return os.path.expanduser("~/" + NAG_FILE) |
522 |
242 |
687 the user must opt in. |
407 the user must opt in. |
688 |
408 |
689 If the user does not have a nag file, we will query the user and |
409 If the user does not have a nag file, we will query the user and |
690 save the response in the nag file. Subsequent calls to this function |
410 save the response in the nag file. Subsequent calls to this function |
691 will re-use that response. |
411 will re-use that response. |
412 |
|
413 Args: |
|
414 input_fn: used to collect user input. This is for testing only. |
|
692 |
415 |
693 Returns: |
416 Returns: |
694 True if the user wants to check for updates. False otherwise. |
417 True if the user wants to check for updates. False otherwise. |
695 """ |
418 """ |
696 nag = self._ParseNagFile() |
419 nag = self._ParseNagFile() |
729 self.server = server |
452 self.server = server |
730 self.config = config |
453 self.config = config |
731 self.definitions = definitions |
454 self.definitions = definitions |
732 |
455 |
733 def DoUpload(self): |
456 def DoUpload(self): |
457 """Uploads the index definitions.""" |
|
734 StatusUpdate("Uploading index definitions.") |
458 StatusUpdate("Uploading index definitions.") |
735 self.server.Send("/api/datastore/index/add", |
459 self.server.Send("/api/datastore/index/add", |
736 app_id=self.config.application, |
460 app_id=self.config.application, |
737 version=self.config.version, |
461 version=self.config.version, |
738 payload=self.definitions.ToYAML()) |
462 payload=self.definitions.ToYAML()) |
463 |
|
464 |
|
class CronEntryUpload(object):
  """Provides facilities to upload cron entries to the hosting service."""

  def __init__(self, server, config, cron):
    """Creates a new CronEntryUpload.

    Args:
      server: The RPC server to use. Should be an instance of a subclass of
        AbstractRpcServer
      config: The AppInfoExternal object derived from the app.yaml file.
      cron: The CronInfoExternal object loaded from the cron.yaml file.
    """
    self.server = server
    self.config = config
    self.cron = cron

  def DoUpload(self):
    """Uploads the cron entries.

    Serializes the cron configuration to YAML and posts it to the
    cron-update endpoint for this application/version.
    """
    StatusUpdate("Uploading cron entries.")
    self.server.Send("/api/datastore/cron/update",
                     payload=self.cron.ToYAML(),
                     app_id=self.config.application,
                     version=self.config.version)
|
739 |
488 |
740 |
489 |
741 class IndexOperation(object): |
490 class IndexOperation(object): |
742 """Provide facilities for writing Index operation commands.""" |
491 """Provide facilities for writing Index operation commands.""" |
743 |
492 |
838 |
587 |
839 confirmation = self.confirmation_fn( |
588 confirmation = self.confirmation_fn( |
840 "Are you sure you want to delete this index? (N/y/a): ") |
589 "Are you sure you want to delete this index? (N/y/a): ") |
841 confirmation = confirmation.strip().lower() |
590 confirmation = confirmation.strip().lower() |
842 |
591 |
843 if confirmation == 'y': |
592 if confirmation == "y": |
844 return True |
593 return True |
845 elif confirmation == 'n' or confirmation == '': |
594 elif confirmation == "n" or not confirmation: |
846 return False |
595 return False |
847 elif confirmation == 'a': |
596 elif confirmation == "a": |
848 self.force = True |
597 self.force = True |
849 return True |
598 return True |
850 else: |
599 else: |
851 print "Did not understand your response." |
600 print "Did not understand your response." |
852 |
601 |
866 |
615 |
867 Args: |
616 Args: |
868 definitions: datastore_index.IndexDefinitions as loaded from users |
617 definitions: datastore_index.IndexDefinitions as loaded from users |
869 index.yaml file. |
618 index.yaml file. |
870 """ |
619 """ |
871 new_indexes, unused_indexes = self.DoDiff(definitions) |
620 unused_new_indexes, notused_indexes = self.DoDiff(definitions) |
872 |
621 |
873 deletions = datastore_index.IndexDefinitions(indexes=[]) |
622 deletions = datastore_index.IndexDefinitions(indexes=[]) |
874 if unused_indexes.indexes is not None: |
623 if notused_indexes.indexes is not None: |
875 for index in unused_indexes.indexes: |
624 for index in notused_indexes.indexes: |
876 if self.force or self.GetConfirmation(index): |
625 if self.force or self.GetConfirmation(index): |
877 deletions.indexes.append(index) |
626 deletions.indexes.append(index) |
878 |
627 |
879 if len(deletions.indexes) > 0: |
628 if deletions.indexes: |
880 not_deleted = self.DoDelete(deletions) |
629 not_deleted = self.DoDelete(deletions) |
881 |
630 |
882 if not_deleted.indexes: |
631 if not_deleted.indexes: |
883 not_deleted_count = len(not_deleted.indexes) |
632 not_deleted_count = len(not_deleted.indexes) |
884 if not_deleted_count == 1: |
633 if not_deleted_count == 1: |
885 warning_message = ('An index was not deleted. Most likely this is ' |
634 warning_message = ("An index was not deleted. Most likely this is " |
886 'because it no longer exists.\n\n') |
635 "because it no longer exists.\n\n") |
887 else: |
636 else: |
888 warning_message = ('%d indexes were not deleted. Most likely this ' |
637 warning_message = ("%d indexes were not deleted. Most likely this " |
889 'is because they no longer exist.\n\n' |
638 "is because they no longer exist.\n\n" |
890 % not_deleted_count) |
639 % not_deleted_count) |
891 for index in not_deleted.indexes: |
640 for index in not_deleted.indexes: |
892 warning_message = warning_message + index.ToYAML() |
641 warning_message += index.ToYAML() |
893 logging.warning(warning_message) |
642 logging.warning(warning_message) |
894 |
643 |
895 |
644 |
896 class LogsRequester(object): |
645 class LogsRequester(object): |
897 """Provide facilities to export request logs.""" |
646 """Provide facilities to export request logs.""" |
923 self.sentinel = FindSentinel(self.output_file) |
672 self.sentinel = FindSentinel(self.output_file) |
924 self.write_mode = "a" |
673 self.write_mode = "a" |
925 self.valid_dates = None |
674 self.valid_dates = None |
926 if self.num_days: |
675 if self.num_days: |
927 patterns = [] |
676 patterns = [] |
677 now = PacificTime(now) |
|
928 for i in xrange(self.num_days): |
678 for i in xrange(self.num_days): |
929 then = time.gmtime(now - 24*3600 * i) |
679 then = time.gmtime(now - 24*3600 * i) |
930 patterns.append(re.escape(time.strftime("%d/%m/%Y", then))) |
680 patterns.append(re.escape(time.strftime("%d/%m/%Y", then))) |
931 patterns.append(re.escape(time.strftime("%d/%b/%Y", then))) |
681 patterns.append(re.escape(time.strftime("%d/%b/%Y", then))) |
932 self.valid_dates = re.compile(r"[^[]+\[(" + "|".join(patterns) + r"):") |
682 self.valid_dates = re.compile(r"[^[]+\[(" + "|".join(patterns) + r"):") |
982 Returns: |
732 Returns: |
983 The offset string to be used for the next request, if another |
733 The offset string to be used for the next request, if another |
984 request should be issued; or None, if not. |
734 request should be issued; or None, if not. |
985 """ |
735 """ |
986 logging.info("Request with offset %r.", offset) |
736 logging.info("Request with offset %r.", offset) |
987 kwds = {'app_id': self.config.application, |
737 kwds = {"app_id": self.config.application, |
988 'version': self.version_id, |
738 "version": self.version_id, |
989 'limit': 100, |
739 "limit": 100, |
990 } |
740 } |
991 if offset: |
741 if offset: |
992 kwds['offset'] = offset |
742 kwds["offset"] = offset |
993 if self.severity is not None: |
743 if self.severity is not None: |
994 kwds['severity'] = str(self.severity) |
744 kwds["severity"] = str(self.severity) |
995 response = self.server.Send("/api/request_logs", payload=None, **kwds) |
745 response = self.server.Send("/api/request_logs", payload=None, **kwds) |
996 response = response.replace("\r", "\0") |
746 response = response.replace("\r", "\0") |
997 lines = response.splitlines() |
747 lines = response.splitlines() |
998 logging.info("Received %d bytes, %d records.", len(response), len(lines)) |
748 logging.info("Received %d bytes, %d records.", len(response), len(lines)) |
999 offset = None |
749 offset = None |
1000 if lines and lines[0].startswith('#'): |
750 if lines and lines[0].startswith("#"): |
1001 match = re.match(r'^#\s*next_offset=(\S+)\s*$', lines[0]) |
751 match = re.match(r"^#\s*next_offset=(\S+)\s*$", lines[0]) |
1002 del lines[0] |
752 del lines[0] |
1003 if match: |
753 if match: |
1004 offset = match.group(1) |
754 offset = match.group(1) |
1005 if lines and lines[-1].startswith('#'): |
755 if lines and lines[-1].startswith("#"): |
1006 del lines[-1] |
756 del lines[-1] |
1007 valid_dates = self.valid_dates |
757 valid_dates = self.valid_dates |
1008 sentinel = self.sentinel |
758 sentinel = self.sentinel |
1009 len_sentinel = None |
759 len_sentinel = None |
1010 if sentinel: |
760 if sentinel: |
1013 if ((sentinel and |
763 if ((sentinel and |
1014 line.startswith(sentinel) and |
764 line.startswith(sentinel) and |
1015 line[len_sentinel : len_sentinel+1] in ("", "\0")) or |
765 line[len_sentinel : len_sentinel+1] in ("", "\0")) or |
1016 (valid_dates and not valid_dates.match(line))): |
766 (valid_dates and not valid_dates.match(line))): |
1017 return None |
767 return None |
1018 tf.write(line + '\n') |
768 tf.write(line + "\n") |
1019 if not lines: |
769 if not lines: |
1020 return None |
770 return None |
1021 return offset |
771 return offset |
1022 |
772 |
1023 |
773 |
1024 def CopyReversedLines(input, output, blocksize=2**16): |
def PacificTime(now):
  """Helper to return the number of seconds between UTC and Pacific time.

  This is needed to compute today's date in Pacific time (more
  specifically: Mountain View local time), which is how request logs
  are reported.  (Google servers always report times in Mountain View
  local time, regardless of where they are physically located.)

  This takes (post-2006) US DST into account.  Pacific time is either
  8 hours or 7 hours west of UTC, depending on whether DST is in
  effect.  Since 2007, US DST starts on the second Sunday in March and
  ends on the first Sunday in November.  (Reference:
  http://aa.usno.navy.mil/faq/docs/daylight_time.php.)

  Note that the server doesn't report its local time (the HTTP Date
  header uses UTC), and the client's local time is irrelevant.

  Args:
    now: A posix timestamp giving current UTC time.

  Returns:
    A pseudo-posix timestamp giving current Pacific time.  Passing
    this through time.gmtime() will produce a tuple in Pacific local
    time.
  """
  # Shift to PST (UTC-8), then add back one hour if DST is in effect.
  pacific = now - 8 * 3600
  if IsPacificDST(pacific):
    pacific += 3600
  return pacific
|
803 |
|
804 |
|
def IsPacificDST(now):
  """Helper for PacificTime to decide whether now is Pacific DST (PDT).

  Args:
    now: A pseudo-posix timestamp giving current time in PST.

  Returns:
    True if now falls within the range of DST, False otherwise.
  """
  day = 24 * 3600
  sunday = 6
  year = time.gmtime(now)[0]
  # Pre-2007 US DST rules differed; this helper only supports the
  # post-2006 rules (second Sunday in March to first Sunday in November).
  assert year >= 2007

  def first_sunday_at_or_after(ts):
    # Advance a day at a time until the timestamp lands on a Sunday.
    while time.gmtime(ts).tm_wday != sunday:
      ts += day
    return ts

  # DST begins at 2:00 on the second Sunday in March (i.e. the first
  # Sunday on or after March 8) ...
  dst_begin = first_sunday_at_or_after(
      calendar.timegm((year, 3, 8, 2, 0, 0, 0, 0, 0)))
  # ... and ends at 2:00 on the first Sunday in November.
  dst_end = first_sunday_at_or_after(
      calendar.timegm((year, 11, 1, 2, 0, 0, 0, 0, 0)))
  return dst_begin <= now < dst_end
|
826 |
|
827 |
|
828 def CopyReversedLines(instream, outstream, blocksize=2**16): |
|
1025 r"""Copy lines from input stream to output stream in reverse order. |
829 r"""Copy lines from input stream to output stream in reverse order. |
1026 |
830 |
1027 As a special feature, null bytes in the input are turned into |
831 As a special feature, null bytes in the input are turned into |
1028 newlines followed by tabs in the output, but these "sub-lines" |
832 newlines followed by tabs in the output, but these "sub-lines" |
1029 separated by null bytes are not reversed. E.g. If the input is |
833 separated by null bytes are not reversed. E.g. If the input is |
1030 "A\0B\nC\0D\n", the output is "C\n\tD\nA\n\tB\n". |
834 "A\0B\nC\0D\n", the output is "C\n\tD\nA\n\tB\n". |
1031 |
835 |
1032 Args: |
836 Args: |
1033 input: A seekable stream open for reading in binary mode. |
837 instream: A seekable stream open for reading in binary mode. |
1034 output: A stream open for writing; doesn't have to be seekable or binary. |
838 outstream: A stream open for writing; doesn't have to be seekable or binary. |
1035 blocksize: Optional block size for buffering, for unit testing. |
839 blocksize: Optional block size for buffering, for unit testing. |
1036 |
840 |
1037 Returns: |
841 Returns: |
1038 The number of lines copied. |
842 The number of lines copied. |
1039 """ |
843 """ |
1040 line_count = 0 |
844 line_count = 0 |
1041 input.seek(0, 2) |
845 instream.seek(0, 2) |
1042 last_block = input.tell() // blocksize |
846 last_block = instream.tell() // blocksize |
1043 spillover = "" |
847 spillover = "" |
1044 for iblock in xrange(last_block + 1, -1, -1): |
848 for iblock in xrange(last_block + 1, -1, -1): |
1045 input.seek(iblock * blocksize) |
849 instream.seek(iblock * blocksize) |
1046 data = input.read(blocksize) |
850 data = instream.read(blocksize) |
1047 lines = data.splitlines(True) |
851 lines = data.splitlines(True) |
1048 lines[-1:] = "".join(lines[-1:] + [spillover]).splitlines(True) |
852 lines[-1:] = "".join(lines[-1:] + [spillover]).splitlines(True) |
1049 if lines and not lines[-1].endswith("\n"): |
853 if lines and not lines[-1].endswith("\n"): |
1050 lines[-1] += "\n" |
854 lines[-1] += "\n" |
1051 lines.reverse() |
855 lines.reverse() |
1052 if lines and iblock > 0: |
856 if lines and iblock > 0: |
1053 spillover = lines.pop() |
857 spillover = lines.pop() |
1054 if lines: |
858 if lines: |
1055 line_count += len(lines) |
859 line_count += len(lines) |
1056 data = "".join(lines).replace("\0", "\n\t") |
860 data = "".join(lines).replace("\0", "\n\t") |
1057 output.write(data) |
861 outstream.write(data) |
1058 return line_count |
862 return line_count |
1059 |
863 |
1060 |
864 |
1061 def FindSentinel(filename, blocksize=2**16): |
865 def FindSentinel(filename, blocksize=2**16): |
1062 """Return the sentinel line from the output file. |
866 """Return the sentinel line from the output file. |
1128 self.in_transaction = False |
932 self.in_transaction = False |
1129 |
933 |
1130 def _Hash(self, content): |
934 def _Hash(self, content): |
1131 """Compute the hash of the content. |
935 """Compute the hash of the content. |
1132 |
936 |
1133 Arg: |
937 Args: |
1134 content: The data to hash as a string. |
938 content: The data to hash as a string. |
1135 |
939 |
1136 Returns: |
940 Returns: |
1137 The string representation of the hash. |
941 The string representation of the hash. |
1138 """ |
942 """ |
1139 h = sha.new(content).hexdigest() |
943 h = sha.new(content).hexdigest() |
1140 return '%s_%s_%s_%s_%s' % (h[0:8], h[8:16], h[16:24], h[24:32], h[32:40]) |
944 return "%s_%s_%s_%s_%s" % (h[0:8], h[8:16], h[16:24], h[24:32], h[32:40]) |
1141 |
945 |
1142 def AddFile(self, path, file_handle): |
946 def AddFile(self, path, file_handle): |
1143 """Adds the provided file to the list to be pushed to the server. |
947 """Adds the provided file to the list to be pushed to the server. |
1144 |
948 |
1145 Args: |
949 Args: |
1148 """ |
952 """ |
1149 assert not self.in_transaction, "Already in a transaction." |
953 assert not self.in_transaction, "Already in a transaction." |
1150 assert file_handle is not None |
954 assert file_handle is not None |
1151 |
955 |
1152 reason = appinfo.ValidFilename(path) |
956 reason = appinfo.ValidFilename(path) |
1153 if reason != '': |
957 if reason: |
1154 logging.error(reason) |
958 logging.error(reason) |
1155 return |
959 return |
1156 |
960 |
1157 pos = file_handle.tell() |
961 pos = file_handle.tell() |
1158 content_hash = self._Hash(file_handle.read()) |
962 content_hash = self._Hash(file_handle.read()) |
1186 files_to_clone.append((path, content_hash)) |
990 files_to_clone.append((path, content_hash)) |
1187 |
991 |
1188 files_to_upload = {} |
992 files_to_upload = {} |
1189 |
993 |
1190 def CloneFiles(url, files, file_type): |
994 def CloneFiles(url, files, file_type): |
1191 if len(files) == 0: |
995 """Sends files to the given url. |
996 |
|
997 Args: |
|
998 url: the server URL to use. |
|
999 files: a list of files |
|
1000 file_type: the type of the files |
|
1001 """ |
|
1002 if not files: |
|
1192 return |
1003 return |
1193 |
1004 |
1194 StatusUpdate("Cloning %d %s file%s." % |
1005 StatusUpdate("Cloning %d %s file%s." % |
1195 (len(files), file_type, len(files) != 1 and "s" or "")) |
1006 (len(files), file_type, len(files) != 1 and "s" or "")) |
1196 for i in xrange(0, len(files), MAX_FILES_TO_CLONE): |
1007 for i in xrange(0, len(files), MAX_FILES_TO_CLONE): |
1206 (f, self.files[f]) for f in result.split(LIST_DELIMITER))) |
1017 (f, self.files[f]) for f in result.split(LIST_DELIMITER))) |
1207 |
1018 |
1208 CloneFiles("/api/appversion/cloneblobs", blobs_to_clone, "static") |
1019 CloneFiles("/api/appversion/cloneblobs", blobs_to_clone, "static") |
1209 CloneFiles("/api/appversion/clonefiles", files_to_clone, "application") |
1020 CloneFiles("/api/appversion/clonefiles", files_to_clone, "application") |
1210 |
1021 |
1211 logging.info('Files to upload: ' + str(files_to_upload)) |
1022 logging.info("Files to upload: " + str(files_to_upload)) |
1212 |
1023 |
1213 self.files = files_to_upload |
1024 self.files = files_to_upload |
1214 return sorted(files_to_upload.iterkeys()) |
1025 return sorted(files_to_upload.iterkeys()) |
1215 |
1026 |
1216 def UploadFile(self, path, file_handle): |
1027 def UploadFile(self, path, file_handle): |
1278 max_size: The maximum size file to upload. |
1089 max_size: The maximum size file to upload. |
1279 openfunc: A function that takes a path and returns a file-like object. |
1090 openfunc: A function that takes a path and returns a file-like object. |
1280 """ |
1091 """ |
1281 logging.info("Reading app configuration.") |
1092 logging.info("Reading app configuration.") |
1282 |
1093 |
1094 path = "" |
|
1283 try: |
1095 try: |
1284 StatusUpdate("Scanning files on local disk.") |
1096 StatusUpdate("Scanning files on local disk.") |
1285 num_files = 0 |
1097 num_files = 0 |
1286 for path in paths: |
1098 for path in paths: |
1287 file_handle = openfunc(path) |
1099 file_handle = openfunc(path) |
1311 path, e) |
1123 path, e) |
1312 raise |
1124 raise |
1313 |
1125 |
1314 try: |
1126 try: |
1315 missing_files = self.Begin() |
1127 missing_files = self.Begin() |
1316 if len(missing_files) > 0: |
1128 if missing_files: |
1317 StatusUpdate("Uploading %d files." % len(missing_files)) |
1129 StatusUpdate("Uploading %d files." % len(missing_files)) |
1318 num_files = 0 |
1130 num_files = 0 |
1319 for missing_file in missing_files: |
1131 for missing_file in missing_files: |
1320 logging.info("Uploading file '%s'" % missing_file) |
1132 logging.info("Uploading file '%s'" % missing_file) |
1321 file_handle = openfunc(missing_file) |
1133 file_handle = openfunc(missing_file) |
1381 length = fh.tell() |
1193 length = fh.tell() |
1382 fh.seek(pos, 0) |
1194 fh.seek(pos, 0) |
1383 return length |
1195 return length |
1384 |
1196 |
1385 |
1197 |
1386 def GetPlatformToken(os_module=os, sys_module=sys, platform=sys.platform): |
1198 def GetUserAgent(get_version=GetVersionObject, |
1387 """Returns a 'User-agent' token for the host system platform. |
1199 get_platform=appengine_rpc.GetPlatformToken): |
1388 |
|
1389 Args: |
|
1390 os_module, sys_module, platform: Used for testing. |
|
1391 |
|
1392 Returns: |
|
1393 String containing the platform token for the host system. |
|
1394 """ |
|
1395 if hasattr(sys_module, "getwindowsversion"): |
|
1396 windows_version = sys_module.getwindowsversion() |
|
1397 version_info = ".".join(str(i) for i in windows_version[:4]) |
|
1398 return platform + "/" + version_info |
|
1399 elif hasattr(os_module, "uname"): |
|
1400 uname = os_module.uname() |
|
1401 return "%s/%s" % (uname[0], uname[2]) |
|
1402 else: |
|
1403 return "unknown" |
|
1404 |
|
1405 |
|
1406 def GetUserAgent(get_version=GetVersionObject, get_platform=GetPlatformToken): |
|
1407 """Determines the value of the 'User-agent' header to use for HTTP requests. |
1200 """Determines the value of the 'User-agent' header to use for HTTP requests. |
1408 |
1201 |
1409 If the 'APPCFG_SDK_NAME' environment variable is present, that will be |
1202 If the 'APPCFG_SDK_NAME' environment variable is present, that will be |
1410 used as the first product token in the user-agent. |
1203 used as the first product token in the user-agent. |
1411 |
1204 |
1412 Args: |
1205 Args: |
1413 get_version, get_platform: Used for testing. |
1206 get_version: Used for testing. |
1207 get_platform: Used for testing. |
|
1414 |
1208 |
1415 Returns: |
1209 Returns: |
1416 String containing the 'user-agent' header value, which includes the SDK |
1210 String containing the 'user-agent' header value, which includes the SDK |
1417 version, the platform information, and the version of Python; |
1211 version, the platform information, and the version of Python; |
1418 e.g., "appcfg_py/1.0.1 Darwin/9.2.0 Python/2.5.2". |
1212 e.g., "appcfg_py/1.0.1 Darwin/9.2.0 Python/2.5.2". |
1435 |
1229 |
1436 python_version = ".".join(str(i) for i in sys.version_info) |
1230 python_version = ".".join(str(i) for i in sys.version_info) |
1437 product_tokens.append("Python/%s" % python_version) |
1231 product_tokens.append("Python/%s" % python_version) |
1438 |
1232 |
1439 return " ".join(product_tokens) |
1233 return " ".join(product_tokens) |
1234 |
|
1235 |
|
1236 def GetSourceName(get_version=GetVersionObject): |
|
1237 """Gets the name of this source version.""" |
|
1238 version = get_version() |
|
1239 if version is None: |
|
1240 release = "unknown" |
|
1241 else: |
|
1242 release = version["release"] |
|
1243 return "Google-appcfg-%s" % (release,) |
|
1440 |
1244 |
1441 |
1245 |
1442 class AppCfgApp(object): |
1246 class AppCfgApp(object): |
1443 """Singleton class to wrap AppCfg tool functionality. |
1247 """Singleton class to wrap AppCfg tool functionality. |
1444 |
1248 |
1462 OptionsParser will exit the program when there is a parse failure, it |
1266 OptionsParser will exit the program when there is a parse failure, it |
1463 is nice to subclass OptionsParser and catch the error before exiting. |
1267 is nice to subclass OptionsParser and catch the error before exiting. |
1464 """ |
1268 """ |
1465 |
1269 |
1466 def __init__(self, argv, parser_class=optparse.OptionParser, |
1270 def __init__(self, argv, parser_class=optparse.OptionParser, |
1467 rpc_server_class=HttpRpcServer, |
1271 rpc_server_class=appengine_rpc.HttpRpcServer, |
1468 raw_input_fn=raw_input, |
1272 raw_input_fn=raw_input, |
1469 password_input_fn=getpass.getpass, |
1273 password_input_fn=getpass.getpass, |
1470 error_fh=sys.stderr): |
1274 error_fh=sys.stderr): |
1471 """Initializer. Parses the cmdline and selects the Action to use. |
1275 """Initializer. Parses the cmdline and selects the Action to use. |
1472 |
1276 |
1519 """Executes the requested action. |
1323 """Executes the requested action. |
1520 |
1324 |
1521 Catches any HTTPErrors raised by the action and prints them to stderr. |
1325 Catches any HTTPErrors raised by the action and prints them to stderr. |
1522 """ |
1326 """ |
1523 try: |
1327 try: |
1524 self.action.function(self) |
1328 self.action(self) |
1525 except urllib2.HTTPError, e: |
1329 except urllib2.HTTPError, e: |
1526 body = e.read() |
1330 body = e.read() |
1527 print >>self.error_fh, ("Error %d: --- begin server output ---\n" |
1331 print >>self.error_fh, ("Error %d: --- begin server output ---\n" |
1528 "%s\n--- end server output ---" % |
1332 "%s\n--- end server output ---" % |
1529 (e.code, body.rstrip("\n"))) |
1333 (e.code, body.rstrip("\n"))) |
1334 return 1 |
|
1530 except yaml_errors.EventListenerError, e: |
1335 except yaml_errors.EventListenerError, e: |
1531 print >>self.error_fh, ("Error parsing yaml file:\n%s" % e) |
1336 print >>self.error_fh, ("Error parsing yaml file:\n%s" % e) |
1337 return 1 |
|
1338 return 0 |
|
1532 |
1339 |
1533 def _GetActionDescriptions(self): |
1340 def _GetActionDescriptions(self): |
1534 """Returns a formatted string containing the short_descs for all actions.""" |
1341 """Returns a formatted string containing the short_descs for all actions.""" |
1535 action_names = self.actions.keys() |
1342 action_names = self.actions.keys() |
1536 action_names.sort() |
1343 action_names.sort() |
1546 An OptionParser instance. |
1353 An OptionParser instance. |
1547 """ |
1354 """ |
1548 |
1355 |
1549 class Formatter(optparse.IndentedHelpFormatter): |
1356 class Formatter(optparse.IndentedHelpFormatter): |
1550 """Custom help formatter that does not reformat the description.""" |
1357 """Custom help formatter that does not reformat the description.""" |
1358 |
|
1551 def format_description(self, description): |
1359 def format_description(self, description): |
1360 """Very simple formatter.""" |
|
1552 return description + "\n" |
1361 return description + "\n" |
1553 |
1362 |
1554 desc = self._GetActionDescriptions() |
1363 desc = self._GetActionDescriptions() |
1555 desc = ("Action must be one of:\n%s" |
1364 desc = ("Action must be one of:\n%s" |
1556 "Use 'help <action>' for a detailed description.") % desc |
1365 "Use 'help <action>' for a detailed description.") % desc |
1598 """ |
1407 """ |
1599 parser = self._GetOptionParser() |
1408 parser = self._GetOptionParser() |
1600 parser.set_usage(action.usage) |
1409 parser.set_usage(action.usage) |
1601 parser.set_description("%s\n%s" % (action.short_desc, action.long_desc)) |
1410 parser.set_description("%s\n%s" % (action.short_desc, action.long_desc)) |
1602 action.options(self, parser) |
1411 action.options(self, parser) |
1603 options, args = parser.parse_args(self.argv[1:]) |
1412 options, unused_args = parser.parse_args(self.argv[1:]) |
1604 return parser, options |
1413 return parser, options |
1605 |
1414 |
1606 def _PrintHelpAndExit(self, exit_code=2): |
1415 def _PrintHelpAndExit(self, exit_code=2): |
1607 """Prints the parser's help message and exits the program. |
1416 """Prints the parser's help message and exits the program. |
1608 |
1417 |
1639 email = "test@example.com" |
1448 email = "test@example.com" |
1640 logging.info("Using debug user %s. Override with --email" % email) |
1449 logging.info("Using debug user %s. Override with --email" % email) |
1641 server = self.rpc_server_class( |
1450 server = self.rpc_server_class( |
1642 self.options.server, |
1451 self.options.server, |
1643 lambda: (email, "password"), |
1452 lambda: (email, "password"), |
1453 GetUserAgent(), |
|
1454 GetSourceName(), |
|
1644 host_override=self.options.host, |
1455 host_override=self.options.host, |
1645 extra_headers={"Cookie": 'dev_appserver_login="%s:False"' % email}, |
|
1646 save_cookies=self.options.save_cookies) |
1456 save_cookies=self.options.save_cookies) |
1647 server.authenticated = True |
1457 server.authenticated = True |
1648 return server |
1458 return server |
1649 |
1459 |
1460 if self.options.passin: |
|
1461 auth_tries = 1 |
|
1462 else: |
|
1463 auth_tries = 3 |
|
1464 |
|
1650 return self.rpc_server_class(self.options.server, GetUserCredentials, |
1465 return self.rpc_server_class(self.options.server, GetUserCredentials, |
1466 GetUserAgent(), GetSourceName(), |
|
1651 host_override=self.options.host, |
1467 host_override=self.options.host, |
1652 save_cookies=self.options.save_cookies) |
1468 save_cookies=self.options.save_cookies, |
1469 auth_tries=auth_tries, |
|
1470 account_type="HOSTED_OR_GOOGLE") |
|
1653 |
1471 |
1654 def _FindYaml(self, basepath, file_name): |
1472 def _FindYaml(self, basepath, file_name): |
1655 """Find yaml files in application directory. |
1473 """Find yaml files in application directory. |
1656 |
1474 |
1657 Args: |
1475 Args: |
1662 Path to located yaml file if one exists, else None. |
1480 Path to located yaml file if one exists, else None. |
1663 """ |
1481 """ |
1664 if not os.path.isdir(basepath): |
1482 if not os.path.isdir(basepath): |
1665 self.parser.error("Not a directory: %s" % basepath) |
1483 self.parser.error("Not a directory: %s" % basepath) |
1666 |
1484 |
1667 for yaml_file in (file_name + '.yaml', file_name + '.yml'): |
1485 for yaml_file in (file_name + ".yaml", file_name + ".yml"): |
1668 yaml_path = os.path.join(basepath, yaml_file) |
1486 yaml_path = os.path.join(basepath, yaml_file) |
1669 if os.path.isfile(yaml_path): |
1487 if os.path.isfile(yaml_path): |
1670 return yaml_path |
1488 return yaml_path |
1671 |
1489 |
1672 return None |
1490 return None |
1673 |
1491 |
1674 def _ParseAppYaml(self, basepath): |
1492 def _ParseAppYaml(self, basepath): |
1675 """Parses the app.yaml file. |
1493 """Parses the app.yaml file. |
1494 |
|
1495 Args: |
|
1496 basepath: the directory of the application. |
|
1676 |
1497 |
1677 Returns: |
1498 Returns: |
1678 An AppInfoExternal object. |
1499 An AppInfoExternal object. |
1679 """ |
1500 """ |
1680 appyaml_filename = self._FindYaml(basepath, "app") |
1501 appyaml_filename = self._FindYaml(basepath, "app") |
1689 fh.close() |
1510 fh.close() |
1690 return appyaml |
1511 return appyaml |
1691 |
1512 |
1692 def _ParseIndexYaml(self, basepath): |
1513 def _ParseIndexYaml(self, basepath): |
1693 """Parses the index.yaml file. |
1514 """Parses the index.yaml file. |
1515 |
|
1516 Args: |
|
1517 basepath: the directory of the application. |
|
1694 |
1518 |
1695 Returns: |
1519 Returns: |
1696 A single parsed yaml file or None if the file does not exist. |
1520 A single parsed yaml file or None if the file does not exist. |
1697 """ |
1521 """ |
1698 file_name = self._FindYaml(basepath, "index") |
1522 file_name = self._FindYaml(basepath, "index") |
1703 finally: |
1527 finally: |
1704 fh.close() |
1528 fh.close() |
1705 return index_defs |
1529 return index_defs |
1706 return None |
1530 return None |
1707 |
1531 |
1532 def _ParseCronYaml(self, basepath): |
|
1533 """Parses the cron.yaml file. |
|
1534 |
|
1535 Args: |
|
1536 basepath: the directory of the application. |
|
1537 |
|
1538 Returns: |
|
1539 A CronInfoExternal object. |
|
1540 """ |
|
1541 file_name = self._FindYaml(basepath, "cron") |
|
1542 if file_name is not None: |
|
1543 fh = open(file_name, "r") |
|
1544 try: |
|
1545 cron_info = croninfo.LoadSingleCron(fh) |
|
1546 finally: |
|
1547 fh.close() |
|
1548 return cron_info |
|
1549 return None |
|
1550 |
|
1708 def Help(self): |
1551 def Help(self): |
1709 """Prints help for a specific action. |
1552 """Prints help for a specific action. |
1710 |
1553 |
1711 Expects self.args[0] to contain the name of the action in question. |
1554 Expects self.args[0] to contain the name of the action in question. |
1712 Exits the program after printing the help message. |
1555 Exits the program after printing the help message. |
1714 if len(self.args) != 1 or self.args[0] not in self.actions: |
1557 if len(self.args) != 1 or self.args[0] not in self.actions: |
1715 self.parser.error("Expected a single action argument. Must be one of:\n" + |
1558 self.parser.error("Expected a single action argument. Must be one of:\n" + |
1716 self._GetActionDescriptions()) |
1559 self._GetActionDescriptions()) |
1717 |
1560 |
1718 action = self.actions[self.args[0]] |
1561 action = self.actions[self.args[0]] |
1719 self.parser, options = self._MakeSpecificParser(action) |
1562 self.parser, unused_options = self._MakeSpecificParser(action) |
1720 self._PrintHelpAndExit(exit_code=0) |
1563 self._PrintHelpAndExit(exit_code=0) |
1721 |
1564 |
1722 def Update(self): |
1565 def Update(self): |
1723 """Updates and deploys a new appversion.""" |
1566 """Updates and deploys a new appversion.""" |
1724 if len(self.args) != 1: |
1567 if len(self.args) != 1: |
1743 except urllib2.HTTPError, e: |
1586 except urllib2.HTTPError, e: |
1744 StatusUpdate("Error %d: --- begin server output ---\n" |
1587 StatusUpdate("Error %d: --- begin server output ---\n" |
1745 "%s\n--- end server output ---" % |
1588 "%s\n--- end server output ---" % |
1746 (e.code, e.read().rstrip("\n"))) |
1589 (e.code, e.read().rstrip("\n"))) |
1747 print >> self.error_fh, ( |
1590 print >> self.error_fh, ( |
1748 "Your app was updated, but there was an error updating your indexes. " |
1591 "Your app was updated, but there was an error updating your " |
1749 "Please retry later with appcfg.py update_indexes.") |
1592 "indexes. Please retry later with appcfg.py update_indexes.") |
1593 |
|
1594 cron_entries = self._ParseCronYaml(basepath) |
|
1595 if cron_entries: |
|
1596 cron_upload = CronEntryUpload(rpc_server, appyaml, cron_entries) |
|
1597 cron_upload.DoUpload() |
|
1750 |
1598 |
1751 def _UpdateOptions(self, parser): |
1599 def _UpdateOptions(self, parser): |
1752 """Adds update-specific options to 'parser'. |
1600 """Adds update-specific options to 'parser'. |
1753 |
1601 |
1754 Args: |
1602 Args: |
1783 parser: An instance of OptionsParser. |
1631 parser: An instance of OptionsParser. |
1784 """ |
1632 """ |
1785 parser.add_option("-f", "--force", action="store_true", dest="force_delete", |
1633 parser.add_option("-f", "--force", action="store_true", dest="force_delete", |
1786 default=False, |
1634 default=False, |
1787 help="Force deletion without being prompted.") |
1635 help="Force deletion without being prompted.") |
1636 |
|
1637 def UpdateCron(self): |
|
1638 """Updates any new or changed cron definitions.""" |
|
1639 if len(self.args) != 1: |
|
1640 self.parser.error("Expected a single <directory> argument.") |
|
1641 |
|
1642 basepath = self.args[0] |
|
1643 appyaml = self._ParseAppYaml(basepath) |
|
1644 rpc_server = self._GetRpcServer() |
|
1645 |
|
1646 cron_entries = self._ParseCronYaml(basepath) |
|
1647 if cron_entries: |
|
1648 cron_upload = CronEntryUpload(rpc_server, appyaml, cron_entries) |
|
1649 cron_upload.DoUpload() |
|
1788 |
1650 |
1789 def UpdateIndexes(self): |
1651 def UpdateIndexes(self): |
1790 """Updates indexes.""" |
1652 """Updates indexes.""" |
1791 if len(self.args) != 1: |
1653 if len(self.args) != 1: |
1792 self.parser.error("Expected a single <directory> argument.") |
1654 self.parser.error("Expected a single <directory> argument.") |
1833 self.options.severity, |
1695 self.options.severity, |
1834 time.time()) |
1696 time.time()) |
1835 logs_requester.DownloadLogs() |
1697 logs_requester.DownloadLogs() |
1836 |
1698 |
1837 def _RequestLogsOptions(self, parser): |
1699 def _RequestLogsOptions(self, parser): |
1838 """Ads request_logs-specific options to 'parser'. |
1700 """Adds request_logs-specific options to 'parser'. |
1839 |
1701 |
1840 Args: |
1702 Args: |
1841 parser: An instance of OptionsParser. |
1703 parser: An instance of OptionsParser. |
1842 """ |
1704 """ |
1843 parser.add_option("-n", "--num_days", type="int", dest="num_days", |
1705 parser.add_option("-n", "--num_days", type="int", dest="num_days", |
1844 action="store", default=None, |
1706 action="store", default=None, |
1845 help="Number of days worth of log data to get. " |
1707 help="Number of days worth of log data to get. " |
1846 "The cut-off point is midnight UTC. " |
1708 "The cut-off point is midnight UTC. " |
1847 "Use 0 to get all available logs. " |
1709 "Use 0 to get all available logs. " |
1848 "Default is 1, unless --append is also given; " |
1710 "Default is 1, unless --append is also given; " |
1849 "then the default is 0.") |
1711 "then the default is 0.") |
1850 parser.add_option("-a", "--append", dest="append", |
1712 parser.add_option("-a", "--append", dest="append", |
1851 action="store_true", default=False, |
1713 action="store_true", default=False, |
1852 help="Append to existing file.") |
1714 help="Append to existing file.") |
1853 parser.add_option("--severity", type="int", dest="severity", |
1715 parser.add_option("--severity", type="int", dest="severity", |
1854 action="store", default=None, |
1716 action="store", default=None, |
1855 help="Severity of app-level log messages to get. " |
1717 help="Severity of app-level log messages to get. " |
1856 "The range is 0 (DEBUG) through 4 (CRITICAL). " |
1718 "The range is 0 (DEBUG) through 4 (CRITICAL). " |
1857 "If omitted, only request logs are returned.") |
1719 "If omitted, only request logs are returned.") |
1720 |
|
1721 def CronInfo(self, now=None, output=sys.stdout): |
|
1722 """Displays information about cron definitions. |
|
1723 |
|
1724 Args: |
|
1725 now: used for testing. |
|
1726 output: Used for testing. |
|
1727 """ |
|
1728 if len(self.args) != 1: |
|
1729 self.parser.error("Expected a single <directory> argument.") |
|
1730 if now is None: |
|
1731 now = datetime.datetime.now() |
|
1732 |
|
1733 basepath = self.args[0] |
|
1734 cron_entries = self._ParseCronYaml(basepath) |
|
1735 if cron_entries: |
|
1736 for entry in cron_entries.cron: |
|
1737 description = entry.description |
|
1738 if not description: |
|
1739 description = "<no description>" |
|
1740 print >>output, "\n%s:\nURL: %s\nSchedule: %s" % (description, |
|
1741 entry.schedule, |
|
1742 entry.url) |
|
1743 schedule = groctimespecification.GrocTimeSpecification(entry.schedule) |
|
1744 matches = schedule.GetMatches(now, self.options.num_runs) |
|
1745 for match in matches: |
|
1746 print >>output, "%s, %s from now" % ( |
|
1747 match.strftime("%Y-%m-%d %H:%M:%S"), match - now) |
|
1748 |
|
1749 def _CronInfoOptions(self, parser): |
|
1750 """Adds cron_info-specific options to 'parser'. |
|
1751 |
|
1752 Args: |
|
1753 parser: An instance of OptionsParser. |
|
1754 """ |
|
1755 parser.add_option("-n", "--num_runs", type="int", dest="num_runs", |
|
1756 action="store", default=5, |
|
1757 help="Number of runs of each cron job to display" |
|
1758 "Default is 5") |
|
1858 |
1759 |
1859 class Action(object): |
1760 class Action(object): |
1860 """Contains information about a command line action. |
1761 """Contains information about a command line action. |
1861 |
1762 |
1862 Attributes: |
1763 Attributes: |
1863 function: An AppCfgApp function that will perform the appropriate |
1764 function: The name of a function defined on AppCfg or its subclasses |
1864 action. |
1765 that will perform the appropriate action. |
1865 usage: A command line usage string. |
1766 usage: A command line usage string. |
1866 short_desc: A one-line description of the action. |
1767 short_desc: A one-line description of the action. |
1867 long_desc: A detailed description of the action. Whitespace and |
1768 long_desc: A detailed description of the action. Whitespace and |
1868 formatting will be preserved. |
1769 formatting will be preserved. |
1869 options: A function that will add extra options to a given OptionParser |
1770 options: A function that will add extra options to a given OptionParser |
1877 self.usage = usage |
1778 self.usage = usage |
1878 self.short_desc = short_desc |
1779 self.short_desc = short_desc |
1879 self.long_desc = long_desc |
1780 self.long_desc = long_desc |
1880 self.options = options |
1781 self.options = options |
1881 |
1782 |
1783 def __call__(self, appcfg): |
|
1784 """Invoke this Action on the specified AppCfg. |
|
1785 |
|
1786 This calls the function of the appropriate name on AppCfg, and |
|
1787 respects polymophic overrides.""" |
|
1788 method = getattr(appcfg, self.function) |
|
1789 return method() |
|
1790 |
|
1882 actions = { |
1791 actions = { |
1883 |
1792 |
1884 "help": Action( |
1793 "help": Action( |
1885 function=Help, |
1794 function="Help", |
1886 usage="%prog help <action>", |
1795 usage="%prog help <action>", |
1887 short_desc="Print help for a specific action."), |
1796 short_desc="Print help for a specific action."), |
1888 |
1797 |
1889 "update": Action( |
1798 "update": Action( |
1890 function=Update, |
1799 function="Update", |
1891 usage="%prog [options] update <directory>", |
1800 usage="%prog [options] update <directory>", |
1892 options=_UpdateOptions, |
1801 options=_UpdateOptions, |
1893 short_desc="Create or update an app version.", |
1802 short_desc="Create or update an app version.", |
1894 long_desc=""" |
1803 long_desc=""" |
1895 Specify a directory that contains all of the files required by |
1804 Specify a directory that contains all of the files required by |
1896 the app, and appcfg.py will create/update the app version referenced |
1805 the app, and appcfg.py will create/update the app version referenced |
1897 in the app.yaml file at the top level of that directory. appcfg.py |
1806 in the app.yaml file at the top level of that directory. appcfg.py |
1898 will follow symlinks and recursively upload all files to the server. |
1807 will follow symlinks and recursively upload all files to the server. |
1899 Temporary or source control files (e.g. foo~, .svn/*) will be skipped."""), |
1808 Temporary or source control files (e.g. foo~, .svn/*) will be skipped."""), |
1900 |
1809 |
1810 |
|
1811 |
|
1812 |
|
1813 |
|
1814 |
|
1815 |
|
1816 |
|
1817 |
|
1818 |
|
1901 "update_indexes": Action( |
1819 "update_indexes": Action( |
1902 function=UpdateIndexes, |
1820 function="UpdateIndexes", |
1903 usage="%prog [options] update_indexes <directory>", |
1821 usage="%prog [options] update_indexes <directory>", |
1904 short_desc="Update application indexes.", |
1822 short_desc="Update application indexes.", |
1905 long_desc=""" |
1823 long_desc=""" |
1906 The 'update_indexes' command will add additional indexes which are not currently |
1824 The 'update_indexes' command will add additional indexes which are not currently |
1907 in production as well as restart any indexes that were not completed."""), |
1825 in production as well as restart any indexes that were not completed."""), |
1908 |
1826 |
1909 "vacuum_indexes": Action( |
1827 "vacuum_indexes": Action( |
1910 function=VacuumIndexes, |
1828 function="VacuumIndexes", |
1911 usage="%prog [options] vacuum_indexes <directory>", |
1829 usage="%prog [options] vacuum_indexes <directory>", |
1912 options=_VacuumIndexesOptions, |
1830 options=_VacuumIndexesOptions, |
1913 short_desc="Delete unused indexes from application.", |
1831 short_desc="Delete unused indexes from application.", |
1914 long_desc=""" |
1832 long_desc=""" |
1915 The 'vacuum_indexes' command will help clean up indexes which are no longer |
1833 The 'vacuum_indexes' command will help clean up indexes which are no longer |
1916 in use. It does this by comparing the local index configuration with |
1834 in use. It does this by comparing the local index configuration with |
1917 indexes that are actually defined on the server. If any indexes on the |
1835 indexes that are actually defined on the server. If any indexes on the |
1918 server do not exist in the index configuration file, the user is given the |
1836 server do not exist in the index configuration file, the user is given the |
1919 option to delete them."""), |
1837 option to delete them."""), |
1920 |
1838 |
1921 "rollback": Action( |
1839 "rollback": Action( |
1922 function=Rollback, |
1840 function="Rollback", |
1923 usage="%prog [options] rollback <directory>", |
1841 usage="%prog [options] rollback <directory>", |
1924 short_desc="Rollback an in-progress update.", |
1842 short_desc="Rollback an in-progress update.", |
1925 long_desc=""" |
1843 long_desc=""" |
1926 The 'update' command requires a server-side transaction. Use 'rollback' |
1844 The 'update' command requires a server-side transaction. Use 'rollback' |
1927 if you get an error message about another transaction being in progress |
1845 if you get an error message about another transaction being in progress |
1928 and you are sure that there is no such transaction."""), |
1846 and you are sure that there is no such transaction."""), |
1929 |
1847 |
1930 "request_logs": Action( |
1848 "request_logs": Action( |
1931 function=RequestLogs, |
1849 function="RequestLogs", |
1932 usage="%prog [options] request_logs <directory> <output_file>", |
1850 usage="%prog [options] request_logs <directory> <output_file>", |
1933 options=_RequestLogsOptions, |
1851 options=_RequestLogsOptions, |
1934 short_desc="Write request logs in Apache common log format.", |
1852 short_desc="Write request logs in Apache common log format.", |
1935 long_desc=""" |
1853 long_desc=""" |
1936 The 'request_logs' command exports the request logs from your application |
1854 The 'request_logs' command exports the request logs from your application |
1937 to a file. It will write Apache common log format records ordered |
1855 to a file. It will write Apache common log format records ordered |
1938 chronologically. If output file is '-' stdout will be written."""), |
1856 chronologically. If output file is '-' stdout will be written."""), |
1857 |
|
1858 |
|
1859 |
|
1860 |
|
1861 |
|
1862 |
|
1863 |
|
1864 |
|
1865 |
|
1866 |
|
1867 |
|
1868 |
|
1939 |
1869 |
1940 } |
1870 } |
1941 |
1871 |
1942 |
1872 |
1943 def main(argv): |
1873 def main(argv): |
1944 logging.basicConfig(format=("%(asctime)s %(levelname)s %(filename)s:" |
1874 logging.basicConfig(format=("%(asctime)s %(levelname)s %(filename)s:" |
1945 "%(lineno)s %(message)s ")) |
1875 "%(lineno)s %(message)s ")) |
1946 try: |
1876 try: |
1947 AppCfgApp(argv).Run() |
1877 result = AppCfgApp(argv).Run() |
1878 if result: |
|
1879 sys.exit(result) |
|
1948 except KeyboardInterrupt: |
1880 except KeyboardInterrupt: |
1949 StatusUpdate("Interrupted.") |
1881 StatusUpdate("Interrupted.") |
1950 sys.exit(1) |
1882 sys.exit(1) |
1951 |
1883 |
1952 |
1884 |