181 A Yaml object or None if the VERSION file does not exist. |
182 A Yaml object or None if the VERSION file does not exist. |
182 """ |
183 """ |
183 version_filename = os.path.join(os.path.dirname(google.__file__), |
184 version_filename = os.path.join(os.path.dirname(google.__file__), |
184 VERSION_FILE) |
185 VERSION_FILE) |
185 if not isfile(version_filename): |
186 if not isfile(version_filename): |
186 logging.error("Could not find version file at %s", version_filename) |
187 logging.error('Could not find version file at %s', version_filename) |
187 return None |
188 return None |
188 |
189 |
189 version_fh = open_fn(version_filename, "r") |
190 version_fh = open_fn(version_filename, 'r') |
190 try: |
191 try: |
191 version = yaml.safe_load(version_fh) |
192 version = yaml.safe_load(version_fh) |
192 finally: |
193 finally: |
193 version_fh.close() |
194 version_fh.close() |
194 |
195 |
195 return version |
196 return version |
196 |
197 |
def RetryWithBackoff(initial_delay, backoff_factor, max_tries, callable_func):
  """Calls a function multiple times, backing off more and more each time.

  The parameter is named callable_func (not "callable") so it does not
  shadow the builtin of the same name.

  Args:
    initial_delay: Initial delay after first try, in seconds.
    backoff_factor: Delay will be multiplied by this factor after each try.
    max_tries: Maximum number of tries.
    callable_func: The method to call, will pass no arguments.

  Returns:
    True if the function succeeded in one of its tries.

  Raises:
    Whatever the function raises--an exception will immediately stop retries.
  """
  delay = initial_delay
  while not callable_func() and max_tries > 0:
    StatusUpdate('Will check again in %s seconds.' % delay)
    time.sleep(delay)
    delay *= backoff_factor
    max_tries -= 1
  return max_tries > 0
219 |
221 |
220 |
222 |
221 class UpdateCheck(object): |
223 class UpdateCheck(object): |
222 """Determines if the local SDK is the latest version. |
224 """Determines if the local SDK is the latest version. |
223 |
225 |
285 Raises: |
287 Raises: |
286 SystemExit if the api_version is not supported. |
288 SystemExit if the api_version is not supported. |
287 """ |
289 """ |
288 version = self._ParseVersionFile() |
290 version = self._ParseVersionFile() |
289 if version is None: |
291 if version is None: |
290 logging.error("Could not determine if the SDK supports the api_version " |
292 logging.error('Could not determine if the SDK supports the api_version ' |
291 "requested in app.yaml.") |
293 'requested in app.yaml.') |
292 return |
294 return |
293 if self.config.api_version not in version["api_versions"]: |
295 if self.config.api_version not in version['api_versions']: |
294 logging.critical("The api_version specified in app.yaml (%s) is not " |
296 logging.critical('The api_version specified in app.yaml (%s) is not ' |
295 "supported by this release of the SDK. The supported " |
297 'supported by this release of the SDK. The supported ' |
296 "api_versions are %s.", |
298 'api_versions are %s.', |
297 self.config.api_version, version["api_versions"]) |
299 self.config.api_version, version['api_versions']) |
298 sys.exit(1) |
300 sys.exit(1) |
299 |
301 |
300 def CheckForUpdates(self): |
302 def CheckForUpdates(self): |
301 """Queries the server for updates and nags the user if appropriate. |
303 """Queries the server for updates and nags the user if appropriate. |
302 |
304 |
303 Queries the server for the latest SDK version at the same time reporting |
305 Queries the server for the latest SDK version at the same time reporting |
304 the local SDK version. The server will respond with a yaml document |
306 the local SDK version. The server will respond with a yaml document |
305 containing the fields: |
307 containing the fields: |
306 "release": The name of the release (e.g. 1.2). |
308 'release': The name of the release (e.g. 1.2). |
307 "timestamp": The time the release was created (YYYY-MM-DD HH:MM AM/PM TZ). |
309 'timestamp': The time the release was created (YYYY-MM-DD HH:MM AM/PM TZ). |
308 "api_versions": A list of api_version strings (e.g. ['1', 'beta']). |
310 'api_versions': A list of api_version strings (e.g. ['1', 'beta']). |
309 |
311 |
310 We will nag the user with increasing severity if: |
312 We will nag the user with increasing severity if: |
311 - There is a new release. |
313 - There is a new release. |
312 - There is a new release with a new api_version. |
314 - There is a new release with a new api_version. |
313 - There is a new release that does not support the api_version named in |
315 - There is a new release that does not support the api_version named in |
314 self.config. |
316 self.config. |
315 """ |
317 """ |
316 version = self._ParseVersionFile() |
318 version = self._ParseVersionFile() |
317 if version is None: |
319 if version is None: |
318 logging.info("Skipping update check") |
320 logging.info('Skipping update check') |
319 return |
321 return |
320 logging.info("Checking for updates to the SDK.") |
322 logging.info('Checking for updates to the SDK.') |
321 |
323 |
322 try: |
324 try: |
323 response = self.server.Send("/api/updatecheck", |
325 response = self.server.Send('/api/updatecheck', |
324 timeout=UPDATE_CHECK_TIMEOUT, |
326 timeout=UPDATE_CHECK_TIMEOUT, |
325 release=version["release"], |
327 release=version['release'], |
326 timestamp=version["timestamp"], |
328 timestamp=version['timestamp'], |
327 api_versions=version["api_versions"]) |
329 api_versions=version['api_versions']) |
328 except urllib2.URLError, e: |
330 except urllib2.URLError, e: |
329 logging.info("Update check failed: %s", e) |
331 logging.info('Update check failed: %s', e) |
330 return |
332 return |
331 |
333 |
332 latest = yaml.safe_load(response) |
334 latest = yaml.safe_load(response) |
333 if latest["release"] == version["release"]: |
335 if latest['release'] == version['release']: |
334 logging.info("The SDK is up to date.") |
336 logging.info('The SDK is up to date.') |
335 return |
337 return |
336 |
338 |
337 api_versions = latest["api_versions"] |
339 api_versions = latest['api_versions'] |
338 if self.config.api_version not in api_versions: |
340 if self.config.api_version not in api_versions: |
339 self._Nag( |
341 self._Nag( |
340 "The api version you are using (%s) is obsolete! You should\n" |
342 'The api version you are using (%s) is obsolete! You should\n' |
341 "upgrade your SDK and test that your code works with the new\n" |
343 'upgrade your SDK and test that your code works with the new\n' |
342 "api version." % self.config.api_version, |
344 'api version.' % self.config.api_version, |
343 latest, version, force=True) |
345 latest, version, force=True) |
344 return |
346 return |
345 |
347 |
346 if self.config.api_version != api_versions[len(api_versions) - 1]: |
348 if self.config.api_version != api_versions[len(api_versions) - 1]: |
347 self._Nag( |
349 self._Nag( |
348 "The api version you are using (%s) is deprecated. You should\n" |
350 'The api version you are using (%s) is deprecated. You should\n' |
349 "upgrade your SDK to try the new functionality." % |
351 'upgrade your SDK to try the new functionality.' % |
350 self.config.api_version, latest, version) |
352 self.config.api_version, latest, version) |
351 return |
353 return |
352 |
354 |
353 self._Nag("There is a new release of the SDK available.", |
355 self._Nag('There is a new release of the SDK available.', |
354 latest, version) |
356 latest, version) |
355 |
357 |
356 def _ParseNagFile(self): |
358 def _ParseNagFile(self): |
357 """Parses the nag file. |
359 """Parses the nag file. |
358 |
360 |
359 Returns: |
361 Returns: |
360 A NagFile if the file was present else None. |
362 A NagFile if the file was present else None. |
361 """ |
363 """ |
362 nag_filename = UpdateCheck.MakeNagFilename() |
364 nag_filename = UpdateCheck.MakeNagFilename() |
363 if self.isfile(nag_filename): |
365 if self.isfile(nag_filename): |
364 fh = self.open(nag_filename, "r") |
366 fh = self.open(nag_filename, 'r') |
365 try: |
367 try: |
366 nag = NagFile.Load(fh) |
368 nag = NagFile.Load(fh) |
367 finally: |
369 finally: |
368 fh.close() |
370 fh.close() |
369 return nag |
371 return nag |
685 num_days: Number of days worth of logs to export; 0 for all available. |
712 num_days: Number of days worth of logs to export; 0 for all available. |
686 append: True if appending to an existing file. |
713 append: True if appending to an existing file. |
687 severity: App log severity to request (0-4); None for no app logs. |
714 severity: App log severity to request (0-4); None for no app logs. |
688 now: POSIX timestamp used for calculating valid dates for num_days. |
715 now: POSIX timestamp used for calculating valid dates for num_days. |
689 vhost: The virtual host of log messages to get. None for all hosts. |
716 vhost: The virtual host of log messages to get. None for all hosts. |
|
717 include_vhost: If true, the virtual host is included in log messages. |
690 """ |
718 """ |
691 self.server = server |
719 self.server = server |
692 self.config = config |
720 self.config = config |
693 self.output_file = output_file |
721 self.output_file = output_file |
694 self.append = append |
722 self.append = append |
695 self.num_days = num_days |
723 self.num_days = num_days |
696 self.severity = severity |
724 self.severity = severity |
697 self.vhost = vhost |
725 self.vhost = vhost |
698 self.version_id = self.config.version + ".1" |
726 self.include_vhost = include_vhost |
|
727 self.version_id = self.config.version + '.1' |
699 self.sentinel = None |
728 self.sentinel = None |
700 self.write_mode = "w" |
729 self.write_mode = 'w' |
701 if self.append: |
730 if self.append: |
702 self.sentinel = FindSentinel(self.output_file) |
731 self.sentinel = FindSentinel(self.output_file) |
703 self.write_mode = "a" |
732 self.write_mode = 'a' |
704 self.valid_dates = None |
733 self.valid_dates = None |
705 if self.num_days: |
734 if self.num_days: |
706 patterns = [] |
735 patterns = [] |
707 now = PacificTime(now) |
736 now = PacificTime(now) |
708 for i in xrange(self.num_days): |
737 for i in xrange(self.num_days): |
709 then = time.gmtime(now - 24*3600 * i) |
738 then = time.gmtime(now - 24*3600 * i) |
710 patterns.append(re.escape(time.strftime("%d/%m/%Y", then))) |
739 patterns.append(re.escape(time.strftime('%d/%m/%Y', then))) |
711 patterns.append(re.escape(time.strftime("%d/%b/%Y", then))) |
740 patterns.append(re.escape(time.strftime('%d/%b/%Y', then))) |
712 self.valid_dates = re.compile(r"[^[]+\[(" + "|".join(patterns) + r"):") |
741 self.valid_dates = re.compile(r'[^[]+\[(' + '|'.join(patterns) + r'):') |
713 |
742 |
714 def DownloadLogs(self): |
743 def DownloadLogs(self): |
715 """Download the requested logs. |
744 """Download the requested logs. |
716 |
745 |
717 This will write the logs to the file designated by |
746 This will write the logs to the file designated by |
718 self.output_file, or to stdout if the filename is '-'. |
747 self.output_file, or to stdout if the filename is '-'. |
719 Multiple roundtrips to the server may be made. |
748 Multiple roundtrips to the server may be made. |
720 """ |
749 """ |
721 StatusUpdate("Downloading request logs for %s %s." % |
750 StatusUpdate('Downloading request logs for %s %s.' % |
722 (self.config.application, self.version_id)) |
751 (self.config.application, self.version_id)) |
723 tf = tempfile.TemporaryFile() |
752 tf = tempfile.TemporaryFile() |
724 offset = None |
753 offset = None |
725 try: |
754 try: |
726 while True: |
755 while True: |
727 try: |
756 try: |
728 offset = self.RequestLogLines(tf, offset) |
757 offset = self.RequestLogLines(tf, offset) |
729 if not offset: |
758 if not offset: |
730 break |
759 break |
731 except KeyboardInterrupt: |
760 except KeyboardInterrupt: |
732 StatusUpdate("Keyboard interrupt; saving data downloaded so far.") |
761 StatusUpdate('Keyboard interrupt; saving data downloaded so far.') |
733 break |
762 break |
734 StatusUpdate("Copying request logs to %r." % self.output_file) |
763 StatusUpdate('Copying request logs to %r.' % self.output_file) |
735 if self.output_file == "-": |
764 if self.output_file == '-': |
736 of = sys.stdout |
765 of = sys.stdout |
737 else: |
766 else: |
738 try: |
767 try: |
739 of = open(self.output_file, self.write_mode) |
768 of = open(self.output_file, self.write_mode) |
740 except IOError, err: |
769 except IOError, err: |
741 StatusUpdate("Can't write %r: %s." % (self.output_file, err)) |
770 StatusUpdate('Can\'t write %r: %s.' % (self.output_file, err)) |
742 sys.exit(1) |
771 sys.exit(1) |
743 try: |
772 try: |
744 line_count = CopyReversedLines(tf, of) |
773 line_count = CopyReversedLines(tf, of) |
745 finally: |
774 finally: |
746 of.flush() |
775 of.flush() |
747 if of is not sys.stdout: |
776 if of is not sys.stdout: |
748 of.close() |
777 of.close() |
749 finally: |
778 finally: |
750 tf.close() |
779 tf.close() |
751 StatusUpdate("Copied %d records." % line_count) |
780 StatusUpdate('Copied %d records.' % line_count) |
752 |
781 |
753 def RequestLogLines(self, tf, offset): |
782 def RequestLogLines(self, tf, offset): |
754 """Make a single roundtrip to the server. |
783 """Make a single roundtrip to the server. |
755 |
784 |
756 Args: |
785 Args: |
761 |
790 |
762 Returns: |
791 Returns: |
763 The offset string to be used for the next request, if another |
792 The offset string to be used for the next request, if another |
764 request should be issued; or None, if not. |
793 request should be issued; or None, if not. |
765 """ |
794 """ |
766 logging.info("Request with offset %r.", offset) |
795 logging.info('Request with offset %r.', offset) |
767 kwds = {"app_id": self.config.application, |
796 kwds = {'app_id': self.config.application, |
768 "version": self.version_id, |
797 'version': self.version_id, |
769 "limit": 100, |
798 'limit': 100, |
770 } |
799 } |
771 if offset: |
800 if offset: |
772 kwds["offset"] = offset |
801 kwds['offset'] = offset |
773 if self.severity is not None: |
802 if self.severity is not None: |
774 kwds["severity"] = str(self.severity) |
803 kwds['severity'] = str(self.severity) |
775 if self.vhost is not None: |
804 if self.vhost is not None: |
776 kwds["vhost"] = str(self.vhost) |
805 kwds['vhost'] = str(self.vhost) |
777 response = self.server.Send("/api/request_logs", payload=None, **kwds) |
806 if self.include_vhost is not None: |
778 response = response.replace("\r", "\0") |
807 kwds['include_vhost'] = str(self.include_vhost) |
|
808 response = self.server.Send('/api/request_logs', payload=None, **kwds) |
|
809 response = response.replace('\r', '\0') |
779 lines = response.splitlines() |
810 lines = response.splitlines() |
780 logging.info("Received %d bytes, %d records.", len(response), len(lines)) |
811 logging.info('Received %d bytes, %d records.', len(response), len(lines)) |
781 offset = None |
812 offset = None |
782 if lines and lines[0].startswith("#"): |
813 if lines and lines[0].startswith('#'): |
783 match = re.match(r"^#\s*next_offset=(\S+)\s*$", lines[0]) |
814 match = re.match(r'^#\s*next_offset=(\S+)\s*$', lines[0]) |
784 del lines[0] |
815 del lines[0] |
785 if match: |
816 if match: |
786 offset = match.group(1) |
817 offset = match.group(1) |
787 if lines and lines[-1].startswith("#"): |
818 if lines and lines[-1].startswith('#'): |
788 del lines[-1] |
819 del lines[-1] |
789 valid_dates = self.valid_dates |
820 valid_dates = self.valid_dates |
790 sentinel = self.sentinel |
821 sentinel = self.sentinel |
791 len_sentinel = None |
822 len_sentinel = None |
792 if sentinel: |
823 if sentinel: |
793 len_sentinel = len(sentinel) |
824 len_sentinel = len(sentinel) |
794 for line in lines: |
825 for line in lines: |
795 if ((sentinel and |
826 if ((sentinel and |
796 line.startswith(sentinel) and |
827 line.startswith(sentinel) and |
797 line[len_sentinel : len_sentinel+1] in ("", "\0")) or |
828 line[len_sentinel : len_sentinel+1] in ('', '\0')) or |
798 (valid_dates and not valid_dates.match(line))): |
829 (valid_dates and not valid_dates.match(line))): |
799 return None |
830 return None |
800 tf.write(line + "\n") |
831 tf.write(line + '\n') |
801 if not lines: |
832 if not lines: |
802 return None |
833 return None |
803 return offset |
834 return offset |
804 |
835 |
805 |
836 |
1034 file_type: the type of the files |
1065 file_type: the type of the files |
1035 """ |
1066 """ |
1036 if not files: |
1067 if not files: |
1037 return |
1068 return |
1038 |
1069 |
1039 StatusUpdate("Cloning %d %s file%s." % |
1070 StatusUpdate('Cloning %d %s file%s.' % |
1040 (len(files), file_type, len(files) != 1 and "s" or "")) |
1071 (len(files), file_type, len(files) != 1 and 's' or '')) |
1041 for i in xrange(0, len(files), MAX_FILES_TO_CLONE): |
1072 for i in xrange(0, len(files), MAX_FILES_TO_CLONE): |
1042 if i > 0 and i % MAX_FILES_TO_CLONE == 0: |
1073 if i > 0 and i % MAX_FILES_TO_CLONE == 0: |
1043 StatusUpdate("Cloned %d files." % i) |
1074 StatusUpdate('Cloned %d files.' % i) |
1044 |
1075 |
1045 chunk = files[i:min(len(files), i + MAX_FILES_TO_CLONE)] |
1076 chunk = files[i:min(len(files), i + MAX_FILES_TO_CLONE)] |
1046 result = self.server.Send(url, |
1077 result = self.server.Send(url, |
1047 app_id=self.app_id, version=self.version, |
1078 app_id=self.app_id, version=self.version, |
1048 payload=BuildClonePostBody(chunk)) |
1079 payload=BuildClonePostBody(chunk)) |
1049 if result: |
1080 if result: |
1050 files_to_upload.update(dict( |
1081 files_to_upload.update(dict( |
1051 (f, self.files[f]) for f in result.split(LIST_DELIMITER))) |
1082 (f, self.files[f]) for f in result.split(LIST_DELIMITER))) |
1052 |
1083 |
1053 CloneFiles("/api/appversion/cloneblobs", blobs_to_clone, "static") |
1084 CloneFiles('/api/appversion/cloneblobs', blobs_to_clone, 'static') |
1054 CloneFiles("/api/appversion/clonefiles", files_to_clone, "application") |
1085 CloneFiles('/api/appversion/clonefiles', files_to_clone, 'application') |
1055 |
1086 |
1056 logging.info("Files to upload: " + str(files_to_upload)) |
1087 logging.info('Files to upload: ' + str(files_to_upload)) |
1057 |
1088 |
1058 self.files = files_to_upload |
1089 self.files = files_to_upload |
1059 return sorted(files_to_upload.iterkeys()) |
1090 return sorted(files_to_upload.iterkeys()) |
1060 |
1091 |
1061 def UploadFile(self, path, file_handle): |
1092 def UploadFile(self, path, file_handle): |
1141 Exception: Deploy has not yet been called. |
1172 Exception: Deploy has not yet been called. |
1142 |
1173 |
1143 Returns: |
1174 Returns: |
1144 True if the server returned the app is ready to serve. |
1175 True if the server returned the app is ready to serve. |
1145 """ |
1176 """ |
1146 assert self.deployed, "Deploy() must be called before IsReady()." |
1177 assert self.deployed, 'Deploy() must be called before IsReady().' |
1147 |
1178 |
1148 StatusUpdate("Checking if new version is ready to serve.") |
1179 StatusUpdate('Checking if new version is ready to serve.') |
1149 result = self.server.Send("/api/appversion/isready", app_id=self.app_id, |
1180 result = self.server.Send('/api/appversion/isready', app_id=self.app_id, |
1150 version=self.version) |
1181 version=self.version) |
1151 return result == "1" |
1182 return result == '1' |
1152 |
1183 |
1153 def StartServing(self): |
1184 def StartServing(self): |
1154 """Start serving with the newly created version. |
1185 """Start serving with the newly created version. |
1155 |
1186 |
1156 Raises: |
1187 Raises: |
1157 Exception: Deploy has not yet been called. |
1188 Exception: Deploy has not yet been called. |
1158 """ |
1189 """ |
1159 assert self.deployed, "Deploy() must be called before IsReady()." |
1190 assert self.deployed, 'Deploy() must be called before IsReady().' |
1160 |
1191 |
1161 StatusUpdate("Closing update: new version is ready to start serving.") |
1192 StatusUpdate('Closing update: new version is ready to start serving.') |
1162 self.server.Send("/api/appversion/startserving", |
1193 self.server.Send('/api/appversion/startserving', |
1163 app_id=self.app_id, version=self.version) |
1194 app_id=self.app_id, version=self.version) |
1164 self.in_transaction = False |
1195 self.in_transaction = False |
1165 |
1196 |
1166 def Rollback(self): |
1197 def Rollback(self): |
1167 """Rolls back the transaction if one is in progress.""" |
1198 """Rolls back the transaction if one is in progress.""" |
1168 if not self.in_transaction: |
1199 if not self.in_transaction: |
1169 return |
1200 return |
1170 StatusUpdate("Rolling back the update.") |
1201 StatusUpdate('Rolling back the update.') |
1171 self.server.Send("/api/appversion/rollback", app_id=self.app_id, |
1202 self.server.Send('/api/appversion/rollback', app_id=self.app_id, |
1172 version=self.version) |
1203 version=self.version) |
1173 self.in_transaction = False |
1204 self.in_transaction = False |
1174 self.files = {} |
1205 self.files = {} |
1175 |
1206 |
1176 def DoUpload(self, paths, max_size, openfunc): |
1207 def DoUpload(self, paths, max_size, openfunc): |
1179 Args: |
1210 Args: |
1180 paths: An iterator that yields the relative paths of the files to upload. |
1211 paths: An iterator that yields the relative paths of the files to upload. |
1181 max_size: The maximum size file to upload. |
1212 max_size: The maximum size file to upload. |
1182 openfunc: A function that takes a path and returns a file-like object. |
1213 openfunc: A function that takes a path and returns a file-like object. |
1183 """ |
1214 """ |
1184 logging.info("Reading app configuration.") |
1215 logging.info('Reading app configuration.') |
1185 |
1216 |
1186 path = "" |
1217 path = '' |
1187 try: |
1218 try: |
1188 StatusUpdate("Scanning files on local disk.") |
1219 StatusUpdate('Scanning files on local disk.') |
1189 num_files = 0 |
1220 num_files = 0 |
1190 for path in paths: |
1221 for path in paths: |
1191 file_handle = openfunc(path) |
1222 file_handle = openfunc(path) |
1192 try: |
1223 try: |
1193 if self.config.skip_files.match(path): |
1224 if self.config.skip_files.match(path): |
1194 logging.info("Ignoring file '%s': File matches ignore regex.", |
1225 logging.info('Ignoring file \'%s\': File matches ignore regex.', |
1195 path) |
1226 path) |
1196 else: |
1227 else: |
1197 file_length = GetFileLength(file_handle) |
1228 file_length = GetFileLength(file_handle) |
1198 if file_length > max_size: |
1229 if file_length > max_size: |
1199 logging.error("Ignoring file '%s': Too long " |
1230 logging.error('Ignoring file \'%s\': Too long ' |
1200 "(max %d bytes, file is %d bytes)", |
1231 '(max %d bytes, file is %d bytes)', |
1201 path, max_size, file_length) |
1232 path, max_size, file_length) |
1202 else: |
1233 else: |
1203 logging.info("Processing file '%s'", path) |
1234 logging.info('Processing file \'%s\'', path) |
1204 self.AddFile(path, file_handle) |
1235 self.AddFile(path, file_handle) |
1205 finally: |
1236 finally: |
1206 file_handle.close() |
1237 file_handle.close() |
1207 num_files += 1 |
1238 num_files += 1 |
1208 if num_files % 500 == 0: |
1239 if num_files % 500 == 0: |
1209 StatusUpdate("Scanned %d files." % num_files) |
1240 StatusUpdate('Scanned %d files.' % num_files) |
1210 except KeyboardInterrupt: |
1241 except KeyboardInterrupt: |
1211 logging.info("User interrupted. Aborting.") |
1242 logging.info('User interrupted. Aborting.') |
1212 raise |
1243 raise |
1213 except EnvironmentError, e: |
1244 except EnvironmentError, e: |
1214 logging.error("An error occurred processing file '%s': %s. Aborting.", |
1245 logging.error('An error occurred processing file \'%s\': %s. Aborting.', |
1215 path, e) |
1246 path, e) |
1216 raise |
1247 raise |
1217 |
1248 |
1218 try: |
1249 try: |
1219 missing_files = self.Begin() |
1250 missing_files = self.Begin() |
1220 if missing_files: |
1251 if missing_files: |
1221 StatusUpdate("Uploading %d files." % len(missing_files)) |
1252 StatusUpdate('Uploading %d files.' % len(missing_files)) |
1222 num_files = 0 |
1253 num_files = 0 |
1223 for missing_file in missing_files: |
1254 for missing_file in missing_files: |
1224 logging.info("Uploading file '%s'" % missing_file) |
1255 logging.info('Uploading file \'%s\'' % missing_file) |
1225 file_handle = openfunc(missing_file) |
1256 file_handle = openfunc(missing_file) |
1226 try: |
1257 try: |
1227 self.UploadFile(missing_file, file_handle) |
1258 self.UploadFile(missing_file, file_handle) |
1228 finally: |
1259 finally: |
1229 file_handle.close() |
1260 file_handle.close() |
1230 num_files += 1 |
1261 num_files += 1 |
1231 if num_files % 500 == 0: |
1262 if num_files % 500 == 0: |
1232 StatusUpdate("Uploaded %d files." % num_files) |
1263 StatusUpdate('Uploaded %d files.' % num_files) |
1233 |
1264 |
1234 self.Commit() |
1265 self.Commit() |
1235 |
1266 |
1236 except KeyboardInterrupt: |
1267 except KeyboardInterrupt: |
1237 logging.info("User interrupted. Aborting.") |
1268 logging.info('User interrupted. Aborting.') |
1238 self.Rollback() |
1269 self.Rollback() |
1239 raise |
1270 raise |
1240 except: |
1271 except: |
1241 logging.exception("An unexpected error occurred. Aborting.") |
1272 logging.exception('An unexpected error occurred. Aborting.') |
1242 self.Rollback() |
1273 self.Rollback() |
1243 raise |
1274 raise |
1244 |
1275 |
1245 logging.info("Done!") |
1276 logging.info('Done!') |
1246 |
1277 |
1247 |
1278 |
1248 def FileIterator(base, separator=os.path.sep): |
1279 def FileIterator(base, separator=os.path.sep): |
1249 """Walks a directory tree, returning all the files. Follows symlinks. |
1280 """Walks a directory tree, returning all the files. Follows symlinks. |
1250 |
1281 |
1300 get_platform: Used for testing. |
1331 get_platform: Used for testing. |
1301 |
1332 |
1302 Returns: |
1333 Returns: |
1303 String containing the 'user-agent' header value, which includes the SDK |
1334 String containing the 'user-agent' header value, which includes the SDK |
1304 version, the platform information, and the version of Python; |
1335 version, the platform information, and the version of Python; |
1305 e.g., "appcfg_py/1.0.1 Darwin/9.2.0 Python/2.5.2". |
1336 e.g., 'appcfg_py/1.0.1 Darwin/9.2.0 Python/2.5.2'. |
1306 """ |
1337 """ |
1307 product_tokens = [] |
1338 product_tokens = [] |
1308 |
1339 |
1309 sdk_name = os.environ.get("APPCFG_SDK_NAME") |
1340 sdk_name = os.environ.get('APPCFG_SDK_NAME') |
1310 if sdk_name: |
1341 if sdk_name: |
1311 product_tokens.append(sdk_name) |
1342 product_tokens.append(sdk_name) |
1312 else: |
1343 else: |
1313 version = get_version() |
1344 version = get_version() |
1314 if version is None: |
1345 if version is None: |
1315 release = "unknown" |
1346 release = 'unknown' |
1316 else: |
1347 else: |
1317 release = version["release"] |
1348 release = version['release'] |
1318 |
1349 |
1319 product_tokens.append("appcfg_py/%s" % release) |
1350 product_tokens.append('appcfg_py/%s' % release) |
1320 |
1351 |
1321 product_tokens.append(get_platform()) |
1352 product_tokens.append(get_platform()) |
1322 |
1353 |
1323 python_version = ".".join(str(i) for i in sys.version_info) |
1354 python_version = '.'.join(str(i) for i in sys.version_info) |
1324 product_tokens.append("Python/%s" % python_version) |
1355 product_tokens.append('Python/%s' % python_version) |
1325 |
1356 |
1326 return " ".join(product_tokens) |
1357 return ' '.join(product_tokens) |
1327 |
1358 |
1328 |
1359 |
def GetSourceName(get_version=GetVersionObject):
  """Gets the name of this source version.

  Args:
    get_version: Used for testing; returns the SDK version mapping or None.

  Returns:
    A string of the form 'Google-appcfg-<release>', with 'unknown' as the
    release when no version information is available.
  """
  version = get_version()
  if version is None:
    release = 'unknown'
  else:
    release = version['release']
  return 'Google-appcfg-%s' % (release,)
1337 |
1368 |
1338 |
1369 |
1339 class AppCfgApp(object): |
1370 class AppCfgApp(object): |
1340 """Singleton class to wrap AppCfg tool functionality. |
1371 """Singleton class to wrap AppCfg tool functionality. |
1341 |
1372 |
1452 class Formatter(optparse.IndentedHelpFormatter): |
1486 class Formatter(optparse.IndentedHelpFormatter): |
1453 """Custom help formatter that does not reformat the description.""" |
1487 """Custom help formatter that does not reformat the description.""" |
1454 |
1488 |
1455 def format_description(self, description): |
1489 def format_description(self, description): |
1456 """Very simple formatter.""" |
1490 """Very simple formatter.""" |
1457 return description + "\n" |
1491 return description + '\n' |
1458 |
1492 |
1459 desc = self._GetActionDescriptions() |
1493 desc = self._GetActionDescriptions() |
1460 desc = ("Action must be one of:\n%s" |
1494 desc = ('Action must be one of:\n%s' |
1461 "Use 'help <action>' for a detailed description.") % desc |
1495 'Use \'help <action>\' for a detailed description.') % desc |
1462 |
1496 |
1463 parser = self.parser_class(usage="%prog [options] <action>", |
1497 parser = self.parser_class(usage='%prog [options] <action>', |
1464 description=desc, |
1498 description=desc, |
1465 formatter=Formatter(), |
1499 formatter=Formatter(), |
1466 conflict_handler="resolve") |
1500 conflict_handler='resolve') |
1467 parser.add_option("-h", "--help", action="store_true", |
1501 parser.add_option('-h', '--help', action='store_true', |
1468 dest="help", help="Show the help message and exit.") |
1502 dest='help', help='Show the help message and exit.') |
1469 parser.add_option("-q", "--quiet", action="store_const", const=0, |
1503 parser.add_option('-q', '--quiet', action='store_const', const=0, |
1470 dest="verbose", help="Print errors only.") |
1504 dest='verbose', help='Print errors only.') |
1471 parser.add_option("-v", "--verbose", action="store_const", const=2, |
1505 parser.add_option('-v', '--verbose', action='store_const', const=2, |
1472 dest="verbose", default=1, |
1506 dest='verbose', default=1, |
1473 help="Print info level logs.") |
1507 help='Print info level logs.') |
1474 parser.add_option("--noisy", action="store_const", const=3, |
1508 parser.add_option('--noisy', action='store_const', const=3, |
1475 dest="verbose", help="Print all logs.") |
1509 dest='verbose', help='Print all logs.') |
1476 parser.add_option("-s", "--server", action="store", dest="server", |
1510 parser.add_option('-s', '--server', action='store', dest='server', |
1477 default="appengine.google.com", |
1511 default='appengine.google.com', |
1478 metavar="SERVER", help="The server to connect to.") |
1512 metavar='SERVER', help='The server to connect to.') |
1479 parser.add_option("--secure", action="store_true", dest="secure", |
1513 parser.add_option('--secure', action='store_true', dest='secure', |
1480 default=False, |
1514 default=False, |
1481 help="Use SSL when communicating with the server.") |
1515 help='Use SSL when communicating with the server.') |
1482 parser.add_option("-e", "--email", action="store", dest="email", |
1516 parser.add_option('-e', '--email', action='store', dest='email', |
1483 metavar="EMAIL", default=None, |
1517 metavar='EMAIL', default=None, |
1484 help="The username to use. Will prompt if omitted.") |
1518 help='The username to use. Will prompt if omitted.') |
1485 parser.add_option("-H", "--host", action="store", dest="host", |
1519 parser.add_option('-H', '--host', action='store', dest='host', |
1486 metavar="HOST", default=None, |
1520 metavar='HOST', default=None, |
1487 help="Overrides the Host header sent with all RPCs.") |
1521 help='Overrides the Host header sent with all RPCs.') |
1488 parser.add_option("--no_cookies", action="store_false", |
1522 parser.add_option('--no_cookies', action='store_false', |
1489 dest="save_cookies", default=True, |
1523 dest='save_cookies', default=True, |
1490 help="Do not save authentication cookies to local disk.") |
1524 help='Do not save authentication cookies to local disk.') |
1491 parser.add_option("--passin", action="store_true", |
1525 parser.add_option('--passin', action='store_true', |
1492 dest="passin", default=False, |
1526 dest='passin', default=False, |
1493 help="Read the login password from stdin.") |
1527 help='Read the login password from stdin.') |
1494 return parser |
1528 return parser |
1495 |
1529 |
1496 def _MakeSpecificParser(self, action): |
1530 def _MakeSpecificParser(self, action): |
1497 """Creates a new parser with documentation specific to 'action'. |
1531 """Creates a new parser with documentation specific to 'action'. |
1498 |
1532 |
1596 basepath: the directory of the application. |
1630 basepath: the directory of the application. |
1597 |
1631 |
1598 Returns: |
1632 Returns: |
1599 An AppInfoExternal object. |
1633 An AppInfoExternal object. |
1600 """ |
1634 """ |
1601 appyaml_filename = self._FindYaml(basepath, "app") |
1635 appyaml_filename = self._FindYaml(basepath, 'app') |
1602 if appyaml_filename is None: |
1636 if appyaml_filename is None: |
1603 self.parser.error("Directory does not contain an app.yaml " |
1637 self.parser.error('Directory does not contain an app.yaml ' |
1604 "configuration file.") |
1638 'configuration file.') |
1605 |
1639 |
1606 fh = open(appyaml_filename, "r") |
1640 fh = open(appyaml_filename, 'r') |
1607 try: |
1641 try: |
1608 appyaml = appinfo.LoadSingleAppInfo(fh) |
1642 appyaml = appinfo.LoadSingleAppInfo(fh) |
1609 finally: |
1643 finally: |
1610 fh.close() |
1644 fh.close() |
1611 return appyaml |
1645 return appyaml |
1612 |
1646 |
|
1647 def _ParseYamlFile(self, basepath, basename, parser): |
|
1648 """Parses the a yaml file. |
|
1649 |
|
1650 Args: |
|
1651 basepath: the directory of the application. |
|
1652 basename: the base name of the file (with the '.yaml' stripped off). |
|
1653 parser: the function or method used to parse the file. |
|
1654 |
|
1655 Returns: |
|
1656 A single parsed yaml file or None if the file does not exist. |
|
1657 """ |
|
1658 file_name = self._FindYaml(basepath, basename) |
|
1659 if file_name is not None: |
|
1660 fh = open(file_name, 'r') |
|
1661 try: |
|
1662 defns = parser(fh) |
|
1663 finally: |
|
1664 fh.close() |
|
1665 return defns |
|
1666 return None |
|
1667 |
1613 def _ParseIndexYaml(self, basepath): |
1668 def _ParseIndexYaml(self, basepath): |
1614 """Parses the index.yaml file. |
1669 """Parses the index.yaml file. |
1615 |
1670 |
1616 Args: |
1671 Args: |
1617 basepath: the directory of the application. |
1672 basepath: the directory of the application. |
1618 |
1673 |
1619 Returns: |
1674 Returns: |
1620 A single parsed yaml file or None if the file does not exist. |
1675 A single parsed yaml file or None if the file does not exist. |
1621 """ |
1676 """ |
1622 file_name = self._FindYaml(basepath, "index") |
1677 return self._ParseYamlFile(basepath, 'index', |
1623 if file_name is not None: |
1678 datastore_index.ParseIndexDefinitions) |
1624 fh = open(file_name, "r") |
|
1625 try: |
|
1626 index_defs = datastore_index.ParseIndexDefinitions(fh) |
|
1627 finally: |
|
1628 fh.close() |
|
1629 return index_defs |
|
1630 return None |
|
1631 |
1679 |
1632 def _ParseCronYaml(self, basepath): |
1680 def _ParseCronYaml(self, basepath): |
1633 """Parses the cron.yaml file. |
1681 """Parses the cron.yaml file. |
1634 |
1682 |
1635 Args: |
1683 Args: |
1636 basepath: the directory of the application. |
1684 basepath: the directory of the application. |
1637 |
1685 |
1638 Returns: |
1686 Returns: |
1639 A CronInfoExternal object. |
1687 A CronInfoExternal object or None if the file does not exist. |
1640 """ |
1688 """ |
1641 file_name = self._FindYaml(basepath, "cron") |
1689 return self._ParseYamlFile(basepath, 'cron', croninfo.LoadSingleCron) |
1642 if file_name is not None: |
1690 |
1643 fh = open(file_name, "r") |
1691 def _ParseQueueYaml(self, basepath): |
1644 try: |
1692 """Parses the queue.yaml file. |
1645 cron_info = croninfo.LoadSingleCron(fh) |
1693 |
1646 finally: |
1694 Args: |
1647 fh.close() |
1695 basepath: the directory of the application. |
1648 return cron_info |
1696 |
1649 return None |
1697 Returns: |
|
1698 A CronInfoExternal object or None if the file does not exist. |
|
1699 """ |
|
1700 return self._ParseYamlFile(basepath, 'queue', queueinfo.LoadSingleQueue) |
1650 |
1701 |
1651 def Help(self): |
1702 def Help(self): |
1652 """Prints help for a specific action. |
1703 """Prints help for a specific action. |
1653 |
1704 |
1654 Expects self.args[0] to contain the name of the action in question. |
1705 Expects self.args[0] to contain the name of the action in question. |
1655 Exits the program after printing the help message. |
1706 Exits the program after printing the help message. |
1656 """ |
1707 """ |
1657 if len(self.args) != 1 or self.args[0] not in self.actions: |
1708 if len(self.args) != 1 or self.args[0] not in self.actions: |
1658 self.parser.error("Expected a single action argument. Must be one of:\n" + |
1709 self.parser.error('Expected a single action argument. Must be one of:\n' + |
1659 self._GetActionDescriptions()) |
1710 self._GetActionDescriptions()) |
1660 |
1711 |
1661 action = self.actions[self.args[0]] |
1712 action = self.actions[self.args[0]] |
1662 self.parser, unused_options = self._MakeSpecificParser(action) |
1713 self.parser, unused_options = self._MakeSpecificParser(action) |
1663 self._PrintHelpAndExit(exit_code=0) |
1714 self._PrintHelpAndExit(exit_code=0) |
1664 |
1715 |
1665 def Update(self): |
1716 def Update(self): |
1666 """Updates and deploys a new appversion.""" |
1717 """Updates and deploys a new appversion.""" |
1667 if len(self.args) != 1: |
1718 if len(self.args) != 1: |
1668 self.parser.error("Expected a single <directory> argument.") |
1719 self.parser.error('Expected a single <directory> argument.') |
1669 |
1720 |
1670 basepath = self.args[0] |
1721 basepath = self.args[0] |
1671 appyaml = self._ParseAppYaml(basepath) |
1722 appyaml = self._ParseAppYaml(basepath) |
1672 rpc_server = self._GetRpcServer() |
1723 rpc_server = self._GetRpcServer() |
1673 |
1724 |
1674 updatecheck = self.update_check_class(rpc_server, appyaml) |
1725 updatecheck = self.update_check_class(rpc_server, appyaml) |
1675 updatecheck.CheckForUpdates() |
1726 updatecheck.CheckForUpdates() |
1676 |
1727 |
1677 appversion = AppVersionUpload(rpc_server, appyaml) |
1728 appversion = AppVersionUpload(rpc_server, appyaml) |
1678 appversion.DoUpload(FileIterator(basepath), self.options.max_size, |
1729 appversion.DoUpload(FileIterator(basepath), self.options.max_size, |
1679 lambda path: open(os.path.join(basepath, path), "rb")) |
1730 lambda path: open(os.path.join(basepath, path), 'rb')) |
1680 |
1731 |
1681 index_defs = self._ParseIndexYaml(basepath) |
1732 index_defs = self._ParseIndexYaml(basepath) |
1682 if index_defs: |
1733 if index_defs: |
1683 index_upload = IndexDefinitionUpload(rpc_server, appyaml, index_defs) |
1734 index_upload = IndexDefinitionUpload(rpc_server, appyaml, index_defs) |
1684 try: |
1735 try: |
1685 index_upload.DoUpload() |
1736 index_upload.DoUpload() |
1686 except urllib2.HTTPError, e: |
1737 except urllib2.HTTPError, e: |
1687 StatusUpdate("Error %d: --- begin server output ---\n" |
1738 StatusUpdate('Error %d: --- begin server output ---\n' |
1688 "%s\n--- end server output ---" % |
1739 '%s\n--- end server output ---' % |
1689 (e.code, e.read().rstrip("\n"))) |
1740 (e.code, e.read().rstrip('\n'))) |
1690 print >> self.error_fh, ( |
1741 print >> self.error_fh, ( |
1691 "Your app was updated, but there was an error updating your " |
1742 'Your app was updated, but there was an error updating your ' |
1692 "indexes. Please retry later with appcfg.py update_indexes.") |
1743 'indexes. Please retry later with appcfg.py update_indexes.') |
1693 |
1744 |
1694 cron_entries = self._ParseCronYaml(basepath) |
1745 cron_entries = self._ParseCronYaml(basepath) |
1695 if cron_entries: |
1746 if cron_entries: |
1696 cron_upload = CronEntryUpload(rpc_server, appyaml, cron_entries) |
1747 cron_upload = CronEntryUpload(rpc_server, appyaml, cron_entries) |
1697 cron_upload.DoUpload() |
1748 cron_upload.DoUpload() |
1698 |
1749 |
|
1750 queue_entries = self._ParseQueueYaml(basepath) |
|
1751 if queue_entries: |
|
1752 queue_upload = QueueEntryUpload(rpc_server, appyaml, queue_entries) |
|
1753 queue_upload.DoUpload() |
|
1754 |
1699 def _UpdateOptions(self, parser): |
1755 def _UpdateOptions(self, parser): |
1700 """Adds update-specific options to 'parser'. |
1756 """Adds update-specific options to 'parser'. |
1701 |
1757 |
1702 Args: |
1758 Args: |
1703 parser: An instance of OptionsParser. |
1759 parser: An instance of OptionsParser. |
1704 """ |
1760 """ |
1705 parser.add_option("-S", "--max_size", type="int", dest="max_size", |
1761 parser.add_option('-S', '--max_size', type='int', dest='max_size', |
1706 default=10485760, metavar="SIZE", |
1762 default=10485760, metavar='SIZE', |
1707 help="Maximum size of a file to upload.") |
1763 help='Maximum size of a file to upload.') |
1708 |
1764 |
1709 def VacuumIndexes(self): |
1765 def VacuumIndexes(self): |
1710 """Deletes unused indexes.""" |
1766 """Deletes unused indexes.""" |
1711 if len(self.args) != 1: |
1767 if len(self.args) != 1: |
1712 self.parser.error("Expected a single <directory> argument.") |
1768 self.parser.error('Expected a single <directory> argument.') |
1713 |
1769 |
1714 basepath = self.args[0] |
1770 basepath = self.args[0] |
1715 config = self._ParseAppYaml(basepath) |
1771 config = self._ParseAppYaml(basepath) |
1716 |
1772 |
1717 index_defs = self._ParseIndexYaml(basepath) |
1773 index_defs = self._ParseIndexYaml(basepath) |
1749 cron_upload.DoUpload() |
1805 cron_upload.DoUpload() |
1750 |
1806 |
1751 def UpdateIndexes(self): |
1807 def UpdateIndexes(self): |
1752 """Updates indexes.""" |
1808 """Updates indexes.""" |
1753 if len(self.args) != 1: |
1809 if len(self.args) != 1: |
1754 self.parser.error("Expected a single <directory> argument.") |
1810 self.parser.error('Expected a single <directory> argument.') |
1755 |
1811 |
1756 basepath = self.args[0] |
1812 basepath = self.args[0] |
1757 appyaml = self._ParseAppYaml(basepath) |
1813 appyaml = self._ParseAppYaml(basepath) |
1758 rpc_server = self._GetRpcServer() |
1814 rpc_server = self._GetRpcServer() |
1759 |
1815 |
1760 index_defs = self._ParseIndexYaml(basepath) |
1816 index_defs = self._ParseIndexYaml(basepath) |
1761 if index_defs: |
1817 if index_defs: |
1762 index_upload = IndexDefinitionUpload(rpc_server, appyaml, index_defs) |
1818 index_upload = IndexDefinitionUpload(rpc_server, appyaml, index_defs) |
1763 index_upload.DoUpload() |
1819 index_upload.DoUpload() |
1764 |
1820 |
|
1821 def UpdateQueues(self): |
|
1822 """Updates any new or changed task queue definitions.""" |
|
1823 if len(self.args) != 1: |
|
1824 self.parser.error('Expected a single <directory> argument.') |
|
1825 |
|
1826 basepath = self.args[0] |
|
1827 appyaml = self._ParseAppYaml(basepath) |
|
1828 rpc_server = self._GetRpcServer() |
|
1829 |
|
1830 queue_entries = self._ParseQueueYaml(basepath) |
|
1831 if queue_entries: |
|
1832 queue_upload = QueueEntryUpload(rpc_server, appyaml, queue_entries) |
|
1833 queue_upload.DoUpload() |
|
1834 |
1765 def Rollback(self): |
1835 def Rollback(self): |
1766 """Does a rollback of any existing transaction for this app version.""" |
1836 """Does a rollback of any existing transaction for this app version.""" |
1767 if len(self.args) != 1: |
1837 if len(self.args) != 1: |
1768 self.parser.error("Expected a single <directory> argument.") |
1838 self.parser.error('Expected a single <directory> argument.') |
1769 |
1839 |
1770 basepath = self.args[0] |
1840 basepath = self.args[0] |
1771 appyaml = self._ParseAppYaml(basepath) |
1841 appyaml = self._ParseAppYaml(basepath) |
1772 |
1842 |
1773 appversion = AppVersionUpload(self._GetRpcServer(), appyaml) |
1843 appversion = AppVersionUpload(self._GetRpcServer(), appyaml) |
1792 logs_requester = LogsRequester(rpc_server, appyaml, self.args[1], |
1862 logs_requester = LogsRequester(rpc_server, appyaml, self.args[1], |
1793 self.options.num_days, |
1863 self.options.num_days, |
1794 self.options.append, |
1864 self.options.append, |
1795 self.options.severity, |
1865 self.options.severity, |
1796 time.time(), |
1866 time.time(), |
1797 self.options.vhost) |
1867 self.options.vhost, |
|
1868 self.options.include_vhost) |
1798 logs_requester.DownloadLogs() |
1869 logs_requester.DownloadLogs() |
1799 |
1870 |
1800 def _RequestLogsOptions(self, parser): |
1871 def _RequestLogsOptions(self, parser): |
1801 """Adds request_logs-specific options to 'parser'. |
1872 """Adds request_logs-specific options to 'parser'. |
1802 |
1873 |
1803 Args: |
1874 Args: |
1804 parser: An instance of OptionsParser. |
1875 parser: An instance of OptionsParser. |
1805 """ |
1876 """ |
1806 parser.add_option("-n", "--num_days", type="int", dest="num_days", |
1877 parser.add_option('-n', '--num_days', type='int', dest='num_days', |
1807 action="store", default=None, |
1878 action='store', default=None, |
1808 help="Number of days worth of log data to get. " |
1879 help='Number of days worth of log data to get. ' |
1809 "The cut-off point is midnight UTC. " |
1880 'The cut-off point is midnight UTC. ' |
1810 "Use 0 to get all available logs. " |
1881 'Use 0 to get all available logs. ' |
1811 "Default is 1, unless --append is also given; " |
1882 'Default is 1, unless --append is also given; ' |
1812 "then the default is 0.") |
1883 'then the default is 0.') |
1813 parser.add_option("-a", "--append", dest="append", |
1884 parser.add_option('-a', '--append', dest='append', |
1814 action="store_true", default=False, |
1885 action='store_true', default=False, |
1815 help="Append to existing file.") |
1886 help='Append to existing file.') |
1816 parser.add_option("--severity", type="int", dest="severity", |
1887 parser.add_option('--severity', type='int', dest='severity', |
1817 action="store", default=None, |
1888 action='store', default=None, |
1818 help="Severity of app-level log messages to get. " |
1889 help='Severity of app-level log messages to get. ' |
1819 "The range is 0 (DEBUG) through 4 (CRITICAL). " |
1890 'The range is 0 (DEBUG) through 4 (CRITICAL). ' |
1820 "If omitted, only request logs are returned.") |
1891 'If omitted, only request logs are returned.') |
1821 parser.add_option("--vhost", type="string", dest="vhost", |
1892 parser.add_option('--vhost', type='string', dest='vhost', |
1822 action="store", default=None, |
1893 action='store', default=None, |
1823 help="The virtual host of log messages to get. " |
1894 help='The virtual host of log messages to get. ' |
1824 "If omitted, all log messages are returned.") |
1895 'If omitted, all log messages are returned.') |
|
1896 parser.add_option('--include_vhost', dest='include_vhost', |
|
1897 action='store_true', default=False, |
|
1898 help='Include virtual host in log messages.') |
1825 |
1899 |
1826 def CronInfo(self, now=None, output=sys.stdout): |
1900 def CronInfo(self, now=None, output=sys.stdout): |
1827 """Displays information about cron definitions. |
1901 """Displays information about cron definitions. |
1828 |
1902 |
1829 Args: |
1903 Args: |
1830 now: used for testing. |
1904 now: used for testing. |
1831 output: Used for testing. |
1905 output: Used for testing. |
1832 """ |
1906 """ |
1833 if len(self.args) != 1: |
1907 if len(self.args) != 1: |
1834 self.parser.error("Expected a single <directory> argument.") |
1908 self.parser.error('Expected a single <directory> argument.') |
1835 if now is None: |
1909 if now is None: |
1836 now = datetime.datetime.now() |
1910 now = datetime.datetime.now() |
1837 |
1911 |
1838 basepath = self.args[0] |
1912 basepath = self.args[0] |
1839 cron_entries = self._ParseCronYaml(basepath) |
1913 cron_entries = self._ParseCronYaml(basepath) |
1840 if cron_entries and cron_entries.cron: |
1914 if cron_entries and cron_entries.cron: |
1841 for entry in cron_entries.cron: |
1915 for entry in cron_entries.cron: |
1842 description = entry.description |
1916 description = entry.description |
1843 if not description: |
1917 if not description: |
1844 description = "<no description>" |
1918 description = '<no description>' |
1845 print >>output, "\n%s:\nURL: %s\nSchedule: %s" % (description, |
1919 print >>output, '\n%s:\nURL: %s\nSchedule: %s' % (description, |
1846 entry.url, |
1920 entry.url, |
1847 entry.schedule) |
1921 entry.schedule) |
1848 schedule = groctimespecification.GrocTimeSpecification(entry.schedule) |
1922 schedule = groctimespecification.GrocTimeSpecification(entry.schedule) |
1849 matches = schedule.GetMatches(now, self.options.num_runs) |
1923 matches = schedule.GetMatches(now, self.options.num_runs) |
1850 for match in matches: |
1924 for match in matches: |
1851 print >>output, "%s, %s from now" % ( |
1925 print >>output, '%s, %s from now' % ( |
1852 match.strftime("%Y-%m-%d %H:%M:%S"), match - now) |
1926 match.strftime('%Y-%m-%d %H:%M:%S'), match - now) |
1853 |
1927 |
1854 def _CronInfoOptions(self, parser): |
1928 def _CronInfoOptions(self, parser): |
1855 """Adds cron_info-specific options to 'parser'. |
1929 """Adds cron_info-specific options to 'parser'. |
1856 |
1930 |
1857 Args: |
1931 Args: |
1858 parser: An instance of OptionsParser. |
1932 parser: An instance of OptionsParser. |
1859 """ |
1933 """ |
1860 parser.add_option("-n", "--num_runs", type="int", dest="num_runs", |
1934 parser.add_option('-n', '--num_runs', type='int', dest='num_runs', |
1861 action="store", default=5, |
1935 action='store', default=5, |
1862 help="Number of runs of each cron job to display" |
1936 help='Number of runs of each cron job to display' |
1863 "Default is 5") |
1937 'Default is 5') |
1864 |
1938 |
1865 def _CheckRequiredLoadOptions(self): |
1939 def _CheckRequiredLoadOptions(self): |
1866 """Checks that upload/download options are present.""" |
1940 """Checks that upload/download options are present.""" |
1867 for option in ["filename", "kind", "config_file"]: |
1941 for option in ['filename', 'kind', 'config_file']: |
1868 if getattr(self.options, option) is None: |
1942 if getattr(self.options, option) is None: |
1869 self.parser.error("Option '%s' is required." % option) |
1943 self.parser.error('Option \'%s\' is required.' % option) |
1870 if not self.options.url: |
1944 if not self.options.url: |
1871 self.parser.error("You must have google.appengine.ext.remote_api.handler " |
1945 self.parser.error('You must have google.appengine.ext.remote_api.handler ' |
1872 "assigned to an endpoint in app.yaml, or provide " |
1946 'assigned to an endpoint in app.yaml, or provide ' |
1873 "the url of the handler via the 'url' option.") |
1947 'the url of the handler via the \'url\' option.') |
1874 |
1948 |
1875 def InferRemoteApiUrl(self, appyaml): |
1949 def InferRemoteApiUrl(self, appyaml): |
1876 """Uses app.yaml to determine the remote_api endpoint. |
1950 """Uses app.yaml to determine the remote_api endpoint. |
1877 |
1951 |
1878 Args: |
1952 Args: |
2000 """Adds options common to 'upload_data' and 'download_data'. |
2074 """Adds options common to 'upload_data' and 'download_data'. |
2001 |
2075 |
2002 Args: |
2076 Args: |
2003 parser: An instance of OptionsParser. |
2077 parser: An instance of OptionsParser. |
2004 """ |
2078 """ |
2005 parser.add_option("--filename", type="string", dest="filename", |
2079 parser.add_option('--filename', type='string', dest='filename', |
2006 action="store", |
2080 action='store', |
2007 help="The name of the file containing the input data." |
2081 help='The name of the file containing the input data.' |
2008 " (Required)") |
2082 ' (Required)') |
2009 parser.add_option("--config_file", type="string", dest="config_file", |
2083 parser.add_option('--config_file', type='string', dest='config_file', |
2010 action="store", |
2084 action='store', |
2011 help="Name of the configuration file. (Required)") |
2085 help='Name of the configuration file. (Required)') |
2012 parser.add_option("--kind", type="string", dest="kind", |
2086 parser.add_option('--kind', type='string', dest='kind', |
2013 action="store", |
2087 action='store', |
2014 help="The kind of the entities to store. (Required)") |
2088 help='The kind of the entities to store. (Required)') |
2015 parser.add_option("--url", type="string", dest="url", |
2089 parser.add_option('--url', type='string', dest='url', |
2016 action="store", |
2090 action='store', |
2017 help="The location of the remote_api endpoint.") |
2091 help='The location of the remote_api endpoint.') |
2018 parser.add_option("--num_threads", type="int", dest="num_threads", |
2092 parser.add_option('--num_threads', type='int', dest='num_threads', |
2019 action="store", default=10, |
2093 action='store', default=10, |
2020 help="Number of threads to upload records with.") |
2094 help='Number of threads to upload records with.') |
2021 parser.add_option("--batch_size", type="int", dest="batch_size", |
2095 parser.add_option('--batch_size', type='int', dest='batch_size', |
2022 action="store", default=10, |
2096 action='store', default=10, |
2023 help="Number of records to post in each request.") |
2097 help='Number of records to post in each request.') |
2024 parser.add_option("--bandwidth_limit", type="int", dest="bandwidth_limit", |
2098 parser.add_option('--bandwidth_limit', type='int', dest='bandwidth_limit', |
2025 action="store", default=250000, |
2099 action='store', default=250000, |
2026 help="The maximum bytes/second bandwidth for transfers.") |
2100 help='The maximum bytes/second bandwidth for transfers.') |
2027 parser.add_option("--rps_limit", type="int", dest="rps_limit", |
2101 parser.add_option('--rps_limit', type='int', dest='rps_limit', |
2028 action="store", default=20, |
2102 action='store', default=20, |
2029 help="The maximum records/second for transfers.") |
2103 help='The maximum records/second for transfers.') |
2030 parser.add_option("--http_limit", type="int", dest="http_limit", |
2104 parser.add_option('--http_limit', type='int', dest='http_limit', |
2031 action="store", default=8, |
2105 action='store', default=8, |
2032 help="The maximum requests/second for transfers.") |
2106 help='The maximum requests/second for transfers.') |
2033 parser.add_option("--db_filename", type="string", dest="db_filename", |
2107 parser.add_option('--db_filename', type='string', dest='db_filename', |
2034 action="store", |
2108 action='store', |
2035 help="Name of the progress database file.") |
2109 help='Name of the progress database file.') |
2036 parser.add_option("--auth_domain", type="string", dest="auth_domain", |
2110 parser.add_option('--auth_domain', type='string', dest='auth_domain', |
2037 action="store", default="gmail.com", |
2111 action='store', default='gmail.com', |
2038 help="The name of the authorization domain to use.") |
2112 help='The name of the authorization domain to use.') |
2039 parser.add_option("--log_file", type="string", dest="log_file", |
2113 parser.add_option('--log_file', type='string', dest='log_file', |
2040 help="File to write bulkloader logs. If not supplied " |
2114 help='File to write bulkloader logs. If not supplied ' |
2041 "then a new log file will be created, named: " |
2115 'then a new log file will be created, named: ' |
2042 "bulkloader-log-TIMESTAMP.") |
2116 'bulkloader-log-TIMESTAMP.') |
2043 |
2117 |
2044 def _PerformUploadOptions(self, parser): |
2118 def _PerformUploadOptions(self, parser): |
2045 """Adds 'upload_data' specific options to the 'parser' passed in. |
2119 """Adds 'upload_data' specific options to the 'parser' passed in. |
2046 |
2120 |
2047 Args: |
2121 Args: |
2048 parser: An instance of OptionsParser. |
2122 parser: An instance of OptionsParser. |
2049 """ |
2123 """ |
2050 self._PerformLoadOptions(parser) |
2124 self._PerformLoadOptions(parser) |
2051 parser.add_option("--has_header", dest="has_header", |
2125 parser.add_option('--has_header', dest='has_header', |
2052 action="store_true", default=False, |
2126 action='store_true', default=False, |
2053 help="Whether the first line of the input file should be" |
2127 help='Whether the first line of the input file should be' |
2054 " skipped") |
2128 ' skipped') |
2055 parser.add_option("--loader_opts", type="string", dest="loader_opts", |
2129 parser.add_option('--loader_opts', type='string', dest='loader_opts', |
2056 help="A string to pass to the Loader.initialize method.") |
2130 help='A string to pass to the Loader.initialize method.') |
2057 |
2131 |
2058 def _PerformDownloadOptions(self, parser): |
2132 def _PerformDownloadOptions(self, parser): |
2059 """Adds 'download_data' specific options to the 'parser' passed in. |
2133 """Adds 'download_data' specific options to the 'parser' passed in. |
2060 |
2134 |
2061 Args: |
2135 Args: |
2062 parser: An instance of OptionsParser. |
2136 parser: An instance of OptionsParser. |
2063 """ |
2137 """ |
2064 self._PerformLoadOptions(parser) |
2138 self._PerformLoadOptions(parser) |
2065 parser.add_option("--exporter_opts", type="string", dest="exporter_opts", |
2139 parser.add_option('--exporter_opts', type='string', dest='exporter_opts', |
2066 help="A string to pass to the Exporter.initialize method." |
2140 help='A string to pass to the Exporter.initialize method.' |
2067 ) |
2141 ) |
2068 parser.add_option("--result_db_filename", type="string", |
2142 parser.add_option('--result_db_filename', type='string', |
2069 dest="result_db_filename", |
2143 dest='result_db_filename', |
2070 action="store", |
2144 action='store', |
2071 help="Database to write entities to for download.") |
2145 help='Database to write entities to for download.') |
2072 |
2146 |
2073 class Action(object): |
2147 class Action(object): |
2074 """Contains information about a command line action. |
2148 """Contains information about a command line action. |
2075 |
2149 |
2076 Attributes: |
2150 Attributes: |
2095 |
2169 |
2096 def __call__(self, appcfg): |
2170 def __call__(self, appcfg): |
2097 """Invoke this Action on the specified AppCfg. |
2171 """Invoke this Action on the specified AppCfg. |
2098 |
2172 |
2099 This calls the function of the appropriate name on AppCfg, and |
2173 This calls the function of the appropriate name on AppCfg, and |
2100 respects polymophic overrides.""" |
2174 respects polymophic overrides. |
|
2175 |
|
2176 Args: |
|
2177 appcfg: The appcfg to use. |
|
2178 Returns: |
|
2179 The result of the function call. |
|
2180 """ |
2101 method = getattr(appcfg, self.function) |
2181 method = getattr(appcfg, self.function) |
2102 return method() |
2182 return method() |
2103 |
2183 |
2104 actions = { |
2184 actions = { |
2105 |
2185 |
2106 "help": Action( |
2186 'help': Action( |
2107 function="Help", |
2187 function='Help', |
2108 usage="%prog help <action>", |
2188 usage='%prog help <action>', |
2109 short_desc="Print help for a specific action."), |
2189 short_desc='Print help for a specific action.'), |
2110 |
2190 |
2111 "update": Action( |
2191 'update': Action( |
2112 function="Update", |
2192 function='Update', |
2113 usage="%prog [options] update <directory>", |
2193 usage='%prog [options] update <directory>', |
2114 options=_UpdateOptions, |
2194 options=_UpdateOptions, |
2115 short_desc="Create or update an app version.", |
2195 short_desc='Create or update an app version.', |
2116 long_desc=""" |
2196 long_desc=""" |
2117 Specify a directory that contains all of the files required by |
2197 Specify a directory that contains all of the files required by |
2118 the app, and appcfg.py will create/update the app version referenced |
2198 the app, and appcfg.py will create/update the app version referenced |
2119 in the app.yaml file at the top level of that directory. appcfg.py |
2199 in the app.yaml file at the top level of that directory. appcfg.py |
2120 will follow symlinks and recursively upload all files to the server. |
2200 will follow symlinks and recursively upload all files to the server. |
2121 Temporary or source control files (e.g. foo~, .svn/*) will be skipped."""), |
2201 Temporary or source control files (e.g. foo~, .svn/*) will be skipped."""), |
2122 |
2202 |
2123 "update_cron": Action( |
2203 'update_cron': Action( |
2124 function="UpdateCron", |
2204 function='UpdateCron', |
2125 usage="%prog [options] update_cron <directory>", |
2205 usage='%prog [options] update_cron <directory>', |
2126 short_desc="Update application cron definitions.", |
2206 short_desc='Update application cron definitions.', |
2127 long_desc=""" |
2207 long_desc=""" |
2128 The 'update_cron' command will update any new, removed or changed cron |
2208 The 'update_cron' command will update any new, removed or changed cron |
2129 definitions from the cron.yaml file."""), |
2209 definitions from the optional cron.yaml file."""), |
2130 |
2210 |
2131 "update_indexes": Action( |
2211 'update_indexes': Action( |
2132 function="UpdateIndexes", |
2212 function='UpdateIndexes', |
2133 usage="%prog [options] update_indexes <directory>", |
2213 usage='%prog [options] update_indexes <directory>', |
2134 short_desc="Update application indexes.", |
2214 short_desc='Update application indexes.', |
2135 long_desc=""" |
2215 long_desc=""" |
2136 The 'update_indexes' command will add additional indexes which are not currently |
2216 The 'update_indexes' command will add additional indexes which are not currently |
2137 in production as well as restart any indexes that were not completed."""), |
2217 in production as well as restart any indexes that were not completed."""), |
2138 |
2218 |
2139 "vacuum_indexes": Action( |
2219 'update_queues': Action( |
2140 function="VacuumIndexes", |
2220 function='UpdateQueues', |
2141 usage="%prog [options] vacuum_indexes <directory>", |
2221 usage='%prog [options] update_queues <directory>', |
|
2222 short_desc='Update application task queue definitions.', |
|
2223 long_desc=""" |
|
2224 The 'update_queue' command will update any new, removed or changed task queue |
|
2225 definitions from the optional queue.yaml file."""), |
|
2226 |
|
2227 'vacuum_indexes': Action( |
|
2228 function='VacuumIndexes', |
|
2229 usage='%prog [options] vacuum_indexes <directory>', |
2142 options=_VacuumIndexesOptions, |
2230 options=_VacuumIndexesOptions, |
2143 short_desc="Delete unused indexes from application.", |
2231 short_desc='Delete unused indexes from application.', |
2144 long_desc=""" |
2232 long_desc=""" |
2145 The 'vacuum_indexes' command will help clean up indexes which are no longer |
2233 The 'vacuum_indexes' command will help clean up indexes which are no longer |
2146 in use. It does this by comparing the local index configuration with |
2234 in use. It does this by comparing the local index configuration with |
2147 indexes that are actually defined on the server. If any indexes on the |
2235 indexes that are actually defined on the server. If any indexes on the |
2148 server do not exist in the index configuration file, the user is given the |
2236 server do not exist in the index configuration file, the user is given the |
2149 option to delete them."""), |
2237 option to delete them."""), |
2150 |
2238 |
2151 "rollback": Action( |
2239 'rollback': Action( |
2152 function="Rollback", |
2240 function='Rollback', |
2153 usage="%prog [options] rollback <directory>", |
2241 usage='%prog [options] rollback <directory>', |
2154 short_desc="Rollback an in-progress update.", |
2242 short_desc='Rollback an in-progress update.', |
2155 long_desc=""" |
2243 long_desc=""" |
2156 The 'update' command requires a server-side transaction. Use 'rollback' |
2244 The 'update' command requires a server-side transaction. Use 'rollback' |
2157 if you get an error message about another transaction being in progress |
2245 if you get an error message about another transaction being in progress |
2158 and you are sure that there is no such transaction."""), |
2246 and you are sure that there is no such transaction."""), |
2159 |
2247 |
2160 "request_logs": Action( |
2248 'request_logs': Action( |
2161 function="RequestLogs", |
2249 function='RequestLogs', |
2162 usage="%prog [options] request_logs <directory> <output_file>", |
2250 usage='%prog [options] request_logs <directory> <output_file>', |
2163 options=_RequestLogsOptions, |
2251 options=_RequestLogsOptions, |
2164 short_desc="Write request logs in Apache common log format.", |
2252 short_desc='Write request logs in Apache common log format.', |
2165 long_desc=""" |
2253 long_desc=""" |
2166 The 'request_logs' command exports the request logs from your application |
2254 The 'request_logs' command exports the request logs from your application |
2167 to a file. It will write Apache common log format records ordered |
2255 to a file. It will write Apache common log format records ordered |
2168 chronologically. If the output file is '-', the records are written to stdout."""), |
2256 chronologically. If the output file is '-', the records are written to stdout."""), |
2169 |
2257 |
2170 "cron_info": Action( |
2258 'cron_info': Action( |
2171 function="CronInfo", |
2259 function='CronInfo', |
2172 usage="%prog [options] cron_info <directory>", |
2260 usage='%prog [options] cron_info <directory>', |
2173 options=_CronInfoOptions, |
2261 options=_CronInfoOptions, |
2174 short_desc="Display information about cron jobs.", |
2262 short_desc='Display information about cron jobs.', |
2175 long_desc=""" |
2263 long_desc=""" |
2176 The 'cron_info' command will display the next 'number' runs (default 5) for |
2264 The 'cron_info' command will display the next 'number' runs (default 5) for |
2177 each cron job defined in the cron.yaml file."""), |
2265 each cron job defined in the cron.yaml file."""), |
2178 |
2266 |
2179 "upload_data": Action( |
2267 'upload_data': Action( |
2180 function="PerformUpload", |
2268 function='PerformUpload', |
2181 usage="%prog [options] upload_data <directory>", |
2269 usage='%prog [options] upload_data <directory>', |
2182 options=_PerformUploadOptions, |
2270 options=_PerformUploadOptions, |
2183 short_desc="Upload data records to datastore.", |
2271 short_desc='Upload data records to datastore.', |
2184 long_desc=""" |
2272 long_desc=""" |
2185 The 'upload_data' command translates input records into datastore entities and |
2273 The 'upload_data' command translates input records into datastore entities and |
2186 uploads them into your application's datastore."""), |
2274 uploads them into your application's datastore."""), |
2187 |
2275 |
2188 "download_data": Action( |
2276 'download_data': Action( |
2189 function="PerformDownload", |
2277 function='PerformDownload', |
2190 usage="%prog [options] download_data <directory>", |
2278 usage='%prog [options] download_data <directory>', |
2191 options=_PerformDownloadOptions, |
2279 options=_PerformDownloadOptions, |
2192 short_desc="Download entities from datastore.", |
2280 short_desc='Download entities from datastore.', |
2193 long_desc=""" |
2281 long_desc=""" |
2194 The 'download_data' command downloads datastore entities and writes them to |
2282 The 'download_data' command downloads datastore entities and writes them to |
2195 file as CSV or developer defined format."""), |
2283 file as CSV or developer defined format."""), |
2196 |
2284 |
2197 |
2285 |
2198 |
2286 |
2199 } |
2287 } |
2200 |
2288 |
2201 |
2289 |
2202 def main(argv): |
2290 def main(argv): |
2203 logging.basicConfig(format=("%(asctime)s %(levelname)s %(filename)s:" |
2291 logging.basicConfig(format=('%(asctime)s %(levelname)s %(filename)s:' |
2204 "%(lineno)s %(message)s ")) |
2292 '%(lineno)s %(message)s ')) |
2205 try: |
2293 try: |
2206 result = AppCfgApp(argv).Run() |
2294 result = AppCfgApp(argv).Run() |
2207 if result: |
2295 if result: |
2208 sys.exit(result) |
2296 sys.exit(result) |
2209 except KeyboardInterrupt: |
2297 except KeyboardInterrupt: |
2210 StatusUpdate("Interrupted.") |
2298 StatusUpdate('Interrupted.') |
2211 sys.exit(1) |
2299 sys.exit(1) |
2212 |
2300 |
2213 |
2301 |
2214 if __name__ == "__main__": |
2302 if __name__ == '__main__': |
2215 main(sys.argv) |
2303 main(sys.argv) |