3
|
1 |
#!/usr/bin/env python
|
|
2 |
import sys
|
|
3 |
import os
|
|
4 |
import errno
|
|
5 |
import stat
|
|
6 |
import optparse
|
|
7 |
import pkg_resources
|
|
8 |
import urllib2
|
|
9 |
import urllib
|
|
10 |
import mimetypes
|
|
11 |
import zipfile
|
|
12 |
import tarfile
|
|
13 |
import tempfile
|
|
14 |
import subprocess
|
|
15 |
import posixpath
|
|
16 |
import re
|
|
17 |
import shutil
|
|
18 |
import fnmatch
|
|
19 |
import operator
|
|
20 |
import copy
|
|
21 |
try:
|
|
22 |
from hashlib import md5
|
|
23 |
except ImportError:
|
|
24 |
import md5 as md5_module
|
|
25 |
md5 = md5_module.new
|
|
26 |
import urlparse
|
|
27 |
from email.FeedParser import FeedParser
|
|
28 |
import traceback
|
|
29 |
from cStringIO import StringIO
|
|
30 |
import socket
|
|
31 |
from Queue import Queue
|
|
32 |
from Queue import Empty as QueueEmpty
|
|
33 |
import threading
|
|
34 |
import httplib
|
|
35 |
import time
|
|
36 |
import logging
|
|
37 |
import ConfigParser
|
|
38 |
from distutils.util import strtobool
|
|
39 |
from distutils import sysconfig
|
|
40 |
|
|
41 |
class InstallationError(Exception):
    """General exception during installation"""
class UninstallationError(Exception):
    """General exception during uninstallation"""
class DistributionNotFound(InstallationError):
    """Raised when a distribution cannot be found to satisfy a requirement"""
class BadCommand(Exception):
    """Raised when virtualenv or a command is not found"""
# Compatibility shim: 'any' is a builtin only from Python 2.5 onwards.
# Probe for it by name and define a pure-Python fallback when missing.
try:
    any
except NameError:
    def any(seq):
        # Fallback: True if at least one item of seq is truthy.
        for item in seq:
            if item:
                return True
        return False
# Pick where build/ and src/ trees live.  virtualenv sets sys.real_prefix,
# so its presence means we are running inside a virtualenv and can keep
# working files inside the environment; otherwise fall back to the CWD.
if getattr(sys, 'real_prefix', None):
    ## FIXME: is build/ a good name?
    build_prefix = os.path.join(sys.prefix, 'build')
    src_prefix = os.path.join(sys.prefix, 'src')
else:
    ## FIXME: this isn't a very good default
    build_prefix = os.path.join(os.getcwd(), 'build')
    src_prefix = os.path.join(os.getcwd(), 'src')
# FIXME doesn't account for venv linked to global site-packages

# Platform-dependent locations: the site-packages dir, the scripts/bin
# directory, and the default per-user pip config file.
site_packages = sysconfig.get_python_lib()
user_dir = os.path.expanduser('~')
if sys.platform == 'win32':
    bin_py = os.path.join(sys.prefix, 'Scripts')
    # buildout uses 'bin' on Windows too?
    if not os.path.exists(bin_py):
        bin_py = os.path.join(sys.prefix, 'bin')
    config_dir = os.environ.get('APPDATA', user_dir) # Use %APPDATA% for roaming
    default_config_file = os.path.join(config_dir, 'pip', 'pip.ini')
else:
    bin_py = os.path.join(sys.prefix, 'bin')
    default_config_file = os.path.join(user_dir, '.pip', 'pip.conf')
    # Forcing to use /usr/local/bin for standard Mac OS X framework installs
    if sys.platform[:6] == 'darwin' and sys.prefix[:16] == '/System/Library/':
        bin_py = '/usr/local/bin'
class UpdatingDefaultsHelpFormatter(optparse.IndentedHelpFormatter):
    """Help formatter for ConfigOptionParser.

    Before optparse expands ``%default`` markers in help text, this
    formatter asks its parser to refresh the defaults from the config
    files and environment, so the help output shows the effective values.
    """

    def expand_default(self, option):
        owner = self.parser
        if owner is not None:
            owner.update_defaults(owner.defaults)
        return optparse.IndentedHelpFormatter.expand_default(self, option)
class ConfigOptionParser(optparse.OptionParser):
|
|
101 |
"""Custom option parser which updates its defaults by by checking the
|
|
102 |
configuration files and environmental variables"""
|
|
103 |
|
|
104 |
def __init__(self, *args, **kwargs):
|
|
105 |
self.config = ConfigParser.RawConfigParser()
|
|
106 |
self.name = kwargs.pop('name')
|
|
107 |
self.files = self.get_config_files()
|
|
108 |
self.config.read(self.files)
|
|
109 |
assert self.name
|
|
110 |
optparse.OptionParser.__init__(self, *args, **kwargs)
|
|
111 |
|
|
112 |
def get_config_files(self):
|
|
113 |
config_file = os.environ.get('PIP_CONFIG_FILE', False)
|
|
114 |
if config_file and os.path.exists(config_file):
|
|
115 |
return [config_file]
|
|
116 |
return [default_config_file]
|
|
117 |
|
|
118 |
def update_defaults(self, defaults):
|
|
119 |
"""Updates the given defaults with values from the config files and
|
|
120 |
the environ. Does a little special handling for certain types of
|
|
121 |
options (lists)."""
|
|
122 |
# Then go and look for the other sources of configuration:
|
|
123 |
config = {}
|
|
124 |
# 1. config files
|
|
125 |
for section in ('global', self.name):
|
|
126 |
config.update(dict(self.get_config_section(section)))
|
|
127 |
# 2. environmental variables
|
|
128 |
config.update(dict(self.get_environ_vars()))
|
|
129 |
# Then set the options with those values
|
|
130 |
for key, val in config.iteritems():
|
|
131 |
key = key.replace('_', '-')
|
|
132 |
if not key.startswith('--'):
|
|
133 |
key = '--%s' % key # only prefer long opts
|
|
134 |
option = self.get_option(key)
|
|
135 |
if option is not None:
|
|
136 |
# ignore empty values
|
|
137 |
if not val:
|
|
138 |
continue
|
|
139 |
# handle multiline configs
|
|
140 |
if option.action == 'append':
|
|
141 |
val = val.split()
|
|
142 |
else:
|
|
143 |
option.nargs = 1
|
|
144 |
if option.action in ('store_true', 'store_false', 'count'):
|
|
145 |
val = strtobool(val)
|
|
146 |
try:
|
|
147 |
val = option.convert_value(key, val)
|
|
148 |
except optparse.OptionValueError, e:
|
|
149 |
print ("An error occured during configuration: %s" % e)
|
|
150 |
sys.exit(3)
|
|
151 |
defaults[option.dest] = val
|
|
152 |
return defaults
|
|
153 |
|
|
154 |
def get_config_section(self, name):
|
|
155 |
"""Get a section of a configuration"""
|
|
156 |
if self.config.has_section(name):
|
|
157 |
return self.config.items(name)
|
|
158 |
return []
|
|
159 |
|
|
160 |
def get_environ_vars(self, prefix='PIP_'):
|
|
161 |
"""Returns a generator with all environmental vars with prefix PIP_"""
|
|
162 |
for key, val in os.environ.iteritems():
|
|
163 |
if key.startswith(prefix):
|
|
164 |
yield (key.replace(prefix, '').lower(), val)
|
|
165 |
|
|
166 |
def get_default_values(self):
|
|
167 |
"""Overridding to make updating the defaults after instantiation of
|
|
168 |
the option parser possible, update_defaults() does the dirty work."""
|
|
169 |
if not self.process_default_values:
|
|
170 |
# Old, pre-Optik 1.5 behaviour.
|
|
171 |
return optparse.Values(self.defaults)
|
|
172 |
|
|
173 |
defaults = self.update_defaults(self.defaults.copy()) # ours
|
|
174 |
for option in self._get_all_options():
|
|
175 |
default = defaults.get(option.dest)
|
|
176 |
if isinstance(default, basestring):
|
|
177 |
opt_str = option.get_opt_string()
|
|
178 |
defaults[option.dest] = option.check_value(opt_str, default)
|
|
179 |
return optparse.Values(defaults)
|
|
180 |
|
|
181 |
# Detect pip's own version string via setuptools metadata; None when pip.py
# is being run straight from a checkout without being installed.
try:
    pip_dist = pkg_resources.get_distribution('pip')
    version = '%s from %s (python %s)' % (
        pip_dist, pip_dist.location, sys.version[:3])
except pkg_resources.DistributionNotFound:
    # when running pip.py without installing
    version=None
def rmtree_errorhandler(func, path, exc_info):
    """On Windows, the files in .svn are read-only, so when rmtree() tries to
    remove them, an exception is thrown. We catch that here, remove the
    read-only attribute, and hopefully continue without problems."""
    exctype, value = exc_info[:2]
    # Only handle the Windows access-denied case; re-raise anything else.
    if not (exctype is WindowsError and 'Access is denied' in str(value)):
        raise
    # The file must actually be read-only for this recovery to make sense.
    current_mode = os.stat(path).st_mode
    if current_mode & stat.S_IREAD != stat.S_IREAD:
        raise
    # Flip the file to writable, then retry the failing operation.
    os.chmod(path, stat.S_IWRITE)
    func(path)
class VcsSupport(object):
    """Registry of version-control backends, keyed by backend name.

    Instantiating it also extends urlparse's scheme lists so VCS URLs
    (ssh, git, hg, bzr, sftp) are parsed with netloc/fragment support.
    """
    _registry = {}
    schemes = ['ssh', 'git', 'hg', 'bzr', 'sftp']

    def __init__(self):
        # Register more schemes with urlparse for various version control systems
        urlparse.uses_netloc.extend(self.schemes)
        urlparse.uses_fragment.extend(self.schemes)
        super(VcsSupport, self).__init__()

    def __iter__(self):
        # Iterating the support object iterates registered backend names.
        return iter(self._registry)

    @property
    def backends(self):
        # All registered backend classes.
        return self._registry.values()

    @property
    def dirnames(self):
        # Control directory names (e.g. '.svn') of every registered backend.
        names = []
        for backend in self.backends:
            names.append(backend.dirname)
        return names

    @property
    def all_schemes(self):
        # Flattened list of every scheme any registered backend supports.
        collected = []
        for backend in self.backends:
            collected.extend(backend.schemes)
        return collected

    def register(self, cls):
        # A backend without a 'name' attribute cannot be keyed; warn and skip.
        if not hasattr(cls, 'name'):
            logger.warn('Cannot register VCS %s' % cls.__name__)
            return
        # First registration wins; duplicates are ignored silently.
        if cls.name not in self._registry:
            self._registry[cls.name] = cls

    def unregister(self, cls=None, name=None):
        # Accept either the backend name or the backend class itself.
        if name in self._registry:
            del self._registry[name]
        elif cls in self._registry.values():
            del self._registry[cls.name]
        else:
            logger.warn('Cannot unregister because no class or name given')

    def get_backend_name(self, location):
        """
        Return the name of the version control backend if found at given
        location, e.g. vcs.get_backend_name('/path/to/vcs/checkout')
        """
        # A checkout is recognized by the presence of the backend's control
        # directory (e.g. location/.hg).
        for candidate in self._registry.values():
            if os.path.exists(os.path.join(location, candidate.dirname)):
                return candidate.name
        return None

    def get_backend(self, name):
        # Lookup is case-insensitive; None when no such backend is registered.
        return self._registry.get(name.lower())

    def get_backend_from_location(self, location):
        # Combine detection and lookup: backend class for a checkout dir.
        detected = self.get_backend_name(location)
        if not detected:
            return None
        return self.get_backend(detected)
# Module-level singleton: VCS backends defined later register themselves here.
vcs = VcsSupport()

# The global option parser: holds options shared by every pip command.
# Interspersed-args parsing is disabled at the bottom so the first
# positional argument is treated as the command name.
parser = ConfigOptionParser(
    usage='%prog COMMAND [OPTIONS]',
    version=version,
    add_help_option=False,
    formatter=UpdatingDefaultsHelpFormatter(),
    name='global')

parser.add_option(
    '-h', '--help',
    dest='help',
    action='store_true',
    help='Show help')
parser.add_option(
    '-E', '--environment',
    dest='venv',
    metavar='DIR',
    help='virtualenv environment to run pip in (either give the '
         'interpreter or the environment base directory)')
parser.add_option(
    '-s', '--enable-site-packages',
    dest='site_packages',
    action='store_true',
    help='Include site-packages in virtualenv if one is to be '
         'created. Ignored if --environment is not used or '
         'the virtualenv already exists.')
parser.add_option(
    # Defines a default root directory for virtualenvs, relative
    # virtualenvs names/paths are considered relative to it.
    '--virtualenv-base',
    dest='venv_base',
    type='str',
    default='',
    help=optparse.SUPPRESS_HELP)
parser.add_option(
    # Run only if inside a virtualenv, bail if not.
    '--require-virtualenv', '--require-venv',
    dest='require_venv',
    action='store_true',
    default=False,
    help=optparse.SUPPRESS_HELP)
parser.add_option(
    # Use automatically an activated virtualenv instead of installing
    # globally. -E will be ignored if used.
    '--respect-virtualenv', '--respect-venv',
    dest='respect_venv',
    action='store_true',
    default=False,
    help=optparse.SUPPRESS_HELP)

# Verbosity is cumulative: each -v raises it, each -q lowers it.
parser.add_option(
    '-v', '--verbose',
    dest='verbose',
    action='count',
    default=0,
    help='Give more output')
parser.add_option(
    '-q', '--quiet',
    dest='quiet',
    action='count',
    default=0,
    help='Give less output')
parser.add_option(
    '--log',
    dest='log',
    metavar='FILENAME',
    help='Log file where a complete (maximum verbosity) record will be kept')
parser.add_option(
    # Write the log levels explicitly to the log
    '--log-explicit-levels',
    dest='log_explicit_levels',
    action='store_true',
    default=False,
    help=optparse.SUPPRESS_HELP)
parser.add_option(
    # The default log file
    '--local-log', '--log-file',
    dest='log_file',
    metavar='FILENAME',
    default='./pip-log.txt',
    help=optparse.SUPPRESS_HELP)

parser.add_option(
    '--proxy',
    dest='proxy',
    type='str',
    default='',
    help="Specify a proxy in the form user:passwd@proxy.server:port. "
         "Note that the user:password@ is optional and required only if you "
         "are behind an authenticated proxy. If you provide "
         "user@proxy.server:port then you will be prompted for a password.")
parser.add_option(
    '--timeout', '--default-timeout',
    metavar='SECONDS',
    dest='timeout',
    type='float',
    default=15,
    help='Set the socket timeout (default %default seconds)')
parser.add_option(
    # The default version control system for editables, e.g. 'svn'
    '--default-vcs',
    dest='default_vcs',
    type='str',
    default='',
    help=optparse.SUPPRESS_HELP)
parser.add_option(
    # A regex to be used to skip requirements
    '--skip-requirements-regex',
    dest='skip_requirements_regex',
    type='str',
    default='',
    help=optparse.SUPPRESS_HELP)

parser.disable_interspersed_args()
# Registry of command name -> command instance; Command.__init__ fills it.
_commands = {}

class Command(object):
    """Base class for pip subcommands.

    Subclasses set ``name``/``usage``/``summary`` and implement
    ``run(options, args)``.  Instantiating a subclass registers it in
    ``_commands`` and builds its own ConfigOptionParser seeded with the
    global parser's options.
    """
    # Subclasses must override these class attributes.
    name = None
    usage = None
    hidden = False
    def __init__(self):
        assert self.name
        self.parser = ConfigOptionParser(
            usage=self.usage,
            prog='%s %s' % (sys.argv[0], self.name),
            version=parser.version,
            formatter=UpdatingDefaultsHelpFormatter(),
            name=self.name)
        # Re-expose every global option on the per-command parser.
        for option in parser.option_list:
            if not option.dest or option.dest == 'help':
                # -h, --version, etc
                continue
            self.parser.add_option(option)
        _commands[self.name] = self

    def merge_options(self, initial_options, options):
        # Make sure we have all global options carried over
        for attr in ['log', 'venv', 'proxy', 'venv_base', 'require_venv',
                     'respect_venv', 'log_explicit_levels', 'log_file',
                     'timeout', 'default_vcs', 'skip_requirements_regex']:
            setattr(options, attr, getattr(initial_options, attr) or getattr(options, attr))
        options.quiet += initial_options.quiet
        options.verbose += initial_options.verbose

    def main(self, complete_args, args, initial_options):
        """Parse command-specific args, set up logging/venv/proxy, then run
        the command; returns the process exit status (0 on success).

        NOTE(review): relies on Logger, restart_in_venv,
        open_logfile_append, setup_proxy_handler and format_exc, which are
        defined elsewhere in this file.
        """
        global logger
        options, args = self.parser.parse_args(args)
        self.merge_options(initial_options, options)

        if options.require_venv and not options.venv:
            # If a venv is required check if it can really be found
            if not os.environ.get('VIRTUAL_ENV'):
                print 'Could not find an activated virtualenv (required).'
                sys.exit(3)
            # Automatically install in currently activated venv if required
            options.respect_venv = True

        if args and args[-1] == '___VENV_RESTART___':
            # Sentinel appended by restart_in_venv to mark a re-exec.
            ## FIXME: We don't do anything this this value yet:
            venv_location = args[-2]
            args = args[:-2]
            options.venv = None
        else:
            # If given the option to respect the activated environment
            # check if no venv is given as a command line parameter
            if options.respect_venv and os.environ.get('VIRTUAL_ENV'):
                if options.venv and os.path.exists(options.venv):
                    # Make sure command line venv and environmental are the same
                    if (os.path.realpath(os.path.expanduser(options.venv)) !=
                            os.path.realpath(os.environ.get('VIRTUAL_ENV'))):
                        print ("Given virtualenv (%s) doesn't match "
                               "currently activated virtualenv (%s)."
                               % (options.venv, os.environ.get('VIRTUAL_ENV')))
                        sys.exit(3)
                else:
                    options.venv = os.environ.get('VIRTUAL_ENV')
                    print 'Using already activated environment %s' % options.venv
        # Map -v/-q counts onto a Logger level (higher verbose = more output).
        level = 1 # Notify
        level += options.verbose
        level -= options.quiet
        level = Logger.level_for_integer(4-level)
        complete_log = []
        logger = Logger([(level, sys.stdout),
                         (Logger.DEBUG, complete_log.append)])
        if options.log_explicit_levels:
            logger.explicit_levels = True
        if options.venv:
            if options.verbose > 0:
                # The logger isn't setup yet
                print 'Running in environment %s' % options.venv
            site_packages=False
            if options.site_packages:
                site_packages=True
            restart_in_venv(options.venv, options.venv_base, site_packages,
                            complete_args)
            # restart_in_venv should actually never return, but for clarity...
            return
        ## FIXME: not sure if this sure come before or after venv restart
        if options.log:
            log_fp = open_logfile_append(options.log)
            logger.consumers.append((logger.DEBUG, log_fp))
        else:
            log_fp = None

        # A timeout of 0 means "no timeout" (None) for the socket module.
        socket.setdefaulttimeout(options.timeout or None)

        setup_proxy_handler(options.proxy)

        # Exit status: 0 ok, 1 known installation error, 2 unexpected error.
        exit = 0
        try:
            self.run(options, args)
        except (InstallationError, UninstallationError), e:
            logger.fatal(str(e))
            logger.info('Exception information:\n%s' % format_exc())
            exit = 1
        except:
            logger.fatal('Exception:\n%s' % format_exc())
            exit = 2

        if log_fp is not None:
            log_fp.close()
        if exit:
            # On failure, dump the full-verbosity log to the local log file.
            log_fn = options.log_file
            text = '\n'.join(complete_log)
            logger.fatal('Storing complete log in %s' % log_fn)
            log_fp = open_logfile_append(log_fn)
            log_fp.write(text)
            log_fp.close()
        return exit
class HelpCommand(Command):
    """'pip help': print help for one command, or list all visible commands."""
    name = 'help'
    usage = '%prog'
    summary = 'Show available commands'

    def run(self, options, args):
        if args:
            # Help for one specific command.
            ## FIXME: handle errors better here
            command = args[0]
            if command not in _commands:
                raise InstallationError('No command with the name: %s' % command)
            command = _commands[command]
            command.parser.print_help()
            return
        # No argument: global help plus a listing of non-hidden commands.
        parser.print_help()
        print
        print 'Commands available:'
        # set() de-duplicates (a command may be registered under aliases).
        commands = list(set(_commands.values()))
        commands.sort(key=lambda x: x.name)
        for command in commands:
            if command.hidden:
                continue
            print ' %s: %s' % (command.name, command.summary)

# Instantiation registers the command in _commands.
HelpCommand()
class InstallCommand(Command):
    """'pip install': resolve, download and install packages.

    Also the base class for BundleCommand, which sets ``bundle = True``
    to skip the actual installation step.
    """
    name = 'install'
    usage = '%prog [OPTIONS] PACKAGE_NAMES...'
    summary = 'Install packages'
    # True in the BundleCommand subclass; changes run()'s behaviour below.
    bundle = False

    def __init__(self):
        super(InstallCommand, self).__init__()
        self.parser.add_option(
            '-e', '--editable',
            dest='editables',
            action='append',
            default=[],
            metavar='VCS+REPOS_URL[@REV]#egg=PACKAGE',
            help='Install a package directly from a checkout. Source will be checked '
                 'out into src/PACKAGE (lower-case) and installed in-place (using '
                 'setup.py develop). You can run this on an existing directory/checkout (like '
                 'pip install -e src/mycheckout). This option may be provided multiple times. '
                 'Possible values for VCS are: svn, git, hg and bzr.')
        self.parser.add_option(
            '-r', '--requirement',
            dest='requirements',
            action='append',
            default=[],
            metavar='FILENAME',
            help='Install all the packages listed in the given requirements file. '
                 'This option can be used multiple times.')
        self.parser.add_option(
            '-f', '--find-links',
            dest='find_links',
            action='append',
            default=[],
            metavar='URL',
            help='URL to look for packages at')
        self.parser.add_option(
            '-i', '--index-url', '--pypi-url',
            dest='index_url',
            metavar='URL',
            default='http://pypi.python.org/simple',
            help='Base URL of Python Package Index (default %default)')
        self.parser.add_option(
            '--extra-index-url',
            dest='extra_index_urls',
            metavar='URL',
            action='append',
            default=[],
            help='Extra URLs of package indexes to use in addition to --index-url')
        self.parser.add_option(
            '--no-index',
            dest='no_index',
            action='store_true',
            default=False,
            help='Ignore package index (only looking at --find-links URLs instead)')

        self.parser.add_option(
            '-b', '--build', '--build-dir', '--build-directory',
            dest='build_dir',
            metavar='DIR',
            default=None,
            help='Unpack packages into DIR (default %s) and build from there' % build_prefix)
        self.parser.add_option(
            '-d', '--download', '--download-dir', '--download-directory',
            dest='download_dir',
            metavar='DIR',
            default=None,
            help='Download packages into DIR instead of installing them')
        self.parser.add_option(
            '--download-cache',
            dest='download_cache',
            metavar='DIR',
            default=None,
            help='Cache downloaded packages in DIR')
        self.parser.add_option(
            '--src', '--source', '--source-dir', '--source-directory',
            dest='src_dir',
            metavar='DIR',
            default=None,
            help='Check out --editable packages into DIR (default %s)' % src_prefix)

        self.parser.add_option(
            '-U', '--upgrade',
            dest='upgrade',
            action='store_true',
            help='Upgrade all packages to the newest available version')
        self.parser.add_option(
            '-I', '--ignore-installed',
            dest='ignore_installed',
            action='store_true',
            help='Ignore the installed packages (reinstalling instead)')
        self.parser.add_option(
            '--no-deps', '--no-dependencies',
            dest='ignore_dependencies',
            action='store_true',
            default=False,
            help='Ignore package dependencies')
        self.parser.add_option(
            '--no-install',
            dest='no_install',
            action='store_true',
            help="Download and unpack all packages, but don't actually install them")

        self.parser.add_option(
            '--install-option',
            dest='install_options',
            action='append',
            help="Extra arguments to be supplied to the setup.py install "
                 "command (use like --install-option=\"--install-scripts=/usr/local/bin\"). "
                 "Use multiple --install-option options to pass multiple options to setup.py install. "
                 "If you are using an option with a directory path, be sure to use absolute path.")

    def run(self, options, args):
        """Build a RequirementSet from args/editables/requirement files,
        fetch everything, then install unless --no-install or bundling.

        NOTE(review): PackageFinder, RequirementSet, InstallRequirement and
        parse_requirements are defined elsewhere in this file.
        """
        if not options.build_dir:
            options.build_dir = build_prefix
        if not options.src_dir:
            options.src_dir = src_prefix
        if options.download_dir:
            # Download-only mode: never install, always re-fetch.
            options.no_install = True
            options.ignore_installed = True
        else:
            options.build_dir = os.path.abspath(options.build_dir)
            options.src_dir = os.path.abspath(options.src_dir)
        install_options = options.install_options or []
        index_urls = [options.index_url] + options.extra_index_urls
        if options.no_index:
            logger.notify('Ignoring indexes: %s' % ','.join(index_urls))
            index_urls = []
        finder = PackageFinder(
            find_links=options.find_links,
            index_urls=index_urls)
        requirement_set = RequirementSet(
            build_dir=options.build_dir,
            src_dir=options.src_dir,
            download_dir=options.download_dir,
            download_cache=options.download_cache,
            upgrade=options.upgrade,
            ignore_installed=options.ignore_installed,
            ignore_dependencies=options.ignore_dependencies)
        # Requirements come from three places: positional args, -e, and -r.
        for name in args:
            requirement_set.add_requirement(
                InstallRequirement.from_line(name, None))
        for name in options.editables:
            requirement_set.add_requirement(
                InstallRequirement.from_editable(name, default_vcs=options.default_vcs))
        for filename in options.requirements:
            for req in parse_requirements(filename, finder=finder, options=options):
                requirement_set.add_requirement(req)
        requirement_set.install_files(finder, force_root_egg_info=self.bundle)
        if not options.no_install and not self.bundle:
            requirement_set.install(install_options)
            installed = ' '.join([req.name for req in
                                  requirement_set.successfully_installed])
            if installed:
                logger.notify('Successfully installed %s' % installed)
        elif not self.bundle:
            downloaded = ' '.join([req.name for req in
                                   requirement_set.successfully_downloaded])
            if downloaded:
                logger.notify('Successfully downloaded %s' % downloaded)
        # Returned so BundleCommand can post-process the set.
        return requirement_set

# Instantiation registers the command in _commands.
InstallCommand()
class UninstallCommand(Command):
    """'pip uninstall': remove installed packages named on the command line
    or listed in requirement files."""
    name = 'uninstall'
    usage = '%prog [OPTIONS] PACKAGE_NAMES ...'
    summary = 'Uninstall packages'

    def __init__(self):
        super(UninstallCommand, self).__init__()
        self.parser.add_option(
            '-r', '--requirement',
            dest='requirements',
            action='append',
            default=[],
            metavar='FILENAME',
            help='Uninstall all the packages listed in the given requirements file. '
                 'This option can be used multiple times.')
        self.parser.add_option(
            '-y', '--yes',
            dest='yes',
            action='store_true',
            help="Don't ask for confirmation of uninstall deletions.")

    def run(self, options, args):
        # No build/src/download dirs are needed just to uninstall.
        requirement_set = RequirementSet(
            build_dir=None,
            src_dir=None,
            download_dir=None)
        for name in args:
            requirement_set.add_requirement(
                InstallRequirement.from_line(name))
        for filename in options.requirements:
            for req in parse_requirements(filename, options=options):
                requirement_set.add_requirement(req)
        # --yes skips the interactive confirmation prompt.
        requirement_set.uninstall(auto_confirm=options.yes)

# Instantiation registers the command in _commands.
UninstallCommand()
class BundleCommand(InstallCommand):
    """'pip bundle': fetch packages like install, then pack them into a
    .pybundle archive instead of installing (bundle=True suppresses the
    install step in InstallCommand.run)."""
    name = 'bundle'
    usage = '%prog [OPTIONS] BUNDLE_NAME.pybundle PACKAGE_NAMES...'
    summary = 'Create pybundles (archives containing multiple packages)'
    bundle = True

    def __init__(self):
        super(BundleCommand, self).__init__()

    def run(self, options, args):
        if not args:
            raise InstallationError('You must give a bundle filename')
        # Use separate '-bundle' suffixed dirs so a normal build tree
        # is not clobbered.
        if not options.build_dir:
            options.build_dir = backup_dir(build_prefix, '-bundle')
        if not options.src_dir:
            options.src_dir = backup_dir(src_prefix, '-bundle')
        # We have to get everything when creating a bundle:
        options.ignore_installed = True
        logger.notify('Putting temporary build files in %s and source/develop files in %s'
                      % (display_path(options.build_dir), display_path(options.src_dir)))
        # First positional argument is the output file; the rest are packages.
        bundle_filename = args[0]
        args = args[1:]
        requirement_set = super(BundleCommand, self).run(options, args)
        # FIXME: here it has to do something
        requirement_set.create_bundle(bundle_filename)
        logger.notify('Created bundle in %s' % bundle_filename)
        return requirement_set

# Instantiation registers the command in _commands.
BundleCommand()
class FreezeCommand(Command):
    """'pip freeze': write the exact versions of all installed packages to
    stdout, optionally shaped by an existing requirements file."""
    name = 'freeze'
    usage = '%prog [OPTIONS]'
    summary = 'Output all currently installed packages (exact versions) to stdout'

    def __init__(self):
        super(FreezeCommand, self).__init__()
        self.parser.add_option(
            '-r', '--requirement',
            dest='requirement',
            action='store',
            default=None,
            metavar='FILENAME',
            help='Use the given requirements file as a hint about how to generate the new frozen requirements')
        self.parser.add_option(
            '-f', '--find-links',
            dest='find_links',
            action='append',
            default=[],
            metavar='URL',
            help='URL for finding packages, which will be added to the frozen requirements file')

    def run(self, options, args):
        """NOTE(review): relies on FrozenRequirement and InstallRequirement,
        defined elsewhere in this file."""
        requirement = options.requirement
        find_links = options.find_links or []
        ## FIXME: Obviously this should be settable:
        find_tags = False
        skip_match = None

        skip_regex = options.skip_requirements_regex
        if skip_regex:
            skip_match = re.compile(skip_regex)

        # Diagnostics go to stderr so stdout stays a clean requirements list.
        logger.move_stdout_to_stderr()
        dependency_links = []

        f = sys.stdout

        # Collect dependency links from installed metadata and -f options.
        for dist in pkg_resources.working_set:
            if dist.has_metadata('dependency_links.txt'):
                dependency_links.extend(dist.get_metadata_lines('dependency_links.txt'))
        for link in find_links:
            if '#egg=' in link:
                dependency_links.append(link)
        for link in find_links:
            f.write('-f %s\n' % link)
        # Map package name -> frozen requirement for everything installed.
        installations = {}
        for dist in pkg_resources.working_set:
            if dist.key in ('setuptools', 'pip', 'python'):
                ## FIXME: also skip virtualenv?
                continue
            req = FrozenRequirement.from_dist(dist, dependency_links, find_tags=find_tags)
            installations[req.name] = req
        if requirement:
            # Echo the hint file, replacing each requirement line with its
            # currently-installed frozen form.
            req_f = open(requirement)
            for line in req_f:
                if not line.strip() or line.strip().startswith('#'):
                    f.write(line)
                    continue
                if skip_match and skip_match.search(line):
                    f.write(line)
                    continue
                elif line.startswith('-e') or line.startswith('--editable'):
                    if line.startswith('-e'):
                        line = line[2:].strip()
                    else:
                        line = line[len('--editable'):].strip().lstrip('=')
                    line_req = InstallRequirement.from_editable(line, default_vcs=options.default_vcs)
                elif (line.startswith('-r') or line.startswith('--requirement')
                      or line.startswith('-Z') or line.startswith('--always-unzip')):
                    logger.debug('Skipping line %r' % line.strip())
                    continue
                else:
                    line_req = InstallRequirement.from_line(line)
                if not line_req.name:
                    logger.notify("Skipping line because it's not clear what it would install: %s"
                                  % line.strip())
                    logger.notify(" (add #egg=PackageName to the URL to avoid this warning)")
                    continue
                if line_req.name not in installations:
                    logger.warn("Requirement file contains %s, but that package is not installed"
                                % line.strip())
                    continue
                f.write(str(installations[line_req.name]))
                # Remove so the leftovers section below only lists extras.
                del installations[line_req.name]
            f.write('## The following requirements were added by pip --freeze:\n')
        for installation in sorted(installations.values(), key=lambda x: x.name):
            f.write(str(installation))

# Instantiation registers the command in _commands.
FreezeCommand()
class ZipCommand(Command):
    """pip 'zip' command: convert an installed package directory into a
    zipped archive on sys.path (and the reverse via --unzip / the
    UnzipCommand subclass, which flips the default direction)."""

    name = 'zip'
    usage = '%prog [OPTIONS] PACKAGE_NAMES...'
    summary = 'Zip individual packages'

    def __init__(self):
        super(ZipCommand, self).__init__()
        # Both 'zip' and 'unzip' share this option set; only the
        # direction-flipping option differs between the two commands.
        if self.name == 'zip':
            self.parser.add_option(
                '--unzip',
                action='store_true',
                dest='unzip',
                help='Unzip (rather than zip) a package')
        else:
            self.parser.add_option(
                '--zip',
                action='store_false',
                dest='unzip',
                default=True,
                help='Zip (rather than unzip) a package')
        self.parser.add_option(
            '--no-pyc',
            action='store_true',
            dest='no_pyc',
            help='Do not include .pyc files in zip files (useful on Google App Engine)')
        self.parser.add_option(
            '-l', '--list',
            action='store_true',
            dest='list',
            help='List the packages available, and their zip status')
        self.parser.add_option(
            '--sort-files',
            action='store_true',
            dest='sort_files',
            help='With --list, sort packages according to how many files they contain')
        self.parser.add_option(
            '--path',
            action='append',
            dest='paths',
            help='Restrict operations to the given paths (may include wildcards)')
        self.parser.add_option(
            '-n', '--simulate',
            action='store_true',
            help='Do not actually perform the zip/unzip operation')

    def paths(self):
        """All the entries of sys.path, possibly restricted by --path"""
        if not self.select_paths:
            return sys.path
        result = []
        match_any = set()
        for path in sys.path:
            path = os.path.normcase(os.path.abspath(path))
            for match in self.select_paths:
                match = os.path.normcase(os.path.abspath(match))
                if '*' in match:
                    # Wildcard patterns are matched as prefixes (note the
                    # appended '*' before fnmatch translation).
                    if re.search(fnmatch.translate(match+'*'), path):
                        result.append(path)
                        match_any.add(match)
                        break
                else:
                    # Literal --path values match as plain path prefixes.
                    if path.startswith(match):
                        result.append(path)
                        match_any.add(match)
                        break
            else:
                # for/else: no --path value matched this sys.path entry.
                logger.debug("Skipping path %s because it doesn't match %s"
                             % (path, ', '.join(self.select_paths)))
        # Non-wildcard --path values that matched nothing on sys.path are
        # still searched, so the user can point at arbitrary directories.
        for match in self.select_paths:
            if match not in match_any and '*' not in match:
                result.append(match)
                logger.debug("Adding path %s because it doesn't match anything already on sys.path"
                             % match)
        return result

    def run(self, options, args):
        # Stash option state on self so paths()/helpers can consult it.
        self.select_paths = options.paths
        self.simulate = options.simulate
        if options.list:
            return self.list(options, args)
        if not args:
            raise InstallationError(
                'You must give at least one package to zip or unzip')
        # Validate every requested package up front before touching any,
        # so a bad argument aborts the whole run cleanly.
        packages = []
        for arg in args:
            module_name, filename = self.find_package(arg)
            if options.unzip and os.path.isdir(filename):
                raise InstallationError(
                    'The module %s (in %s) is not a zip file; cannot be unzipped'
                    % (module_name, filename))
            elif not options.unzip and not os.path.isdir(filename):
                raise InstallationError(
                    'The module %s (in %s) is not a directory; cannot be zipped'
                    % (module_name, filename))
            packages.append((module_name, filename))
        last_status = None
        for module_name, filename in packages:
            if options.unzip:
                last_status = self.unzip_package(module_name, filename)
            else:
                last_status = self.zip_package(module_name, filename, options.no_pyc)
        return last_status

    def unzip_package(self, module_name, filename):
        """Extract module_name's entries out of the zip file containing it,
        rewriting (or deleting) the zip to drop the extracted entries."""
        zip_filename = os.path.dirname(filename)
        # NOTE(review): this guard looks inverted -- as written it only
        # raises when the parent is NOT a regular file yet IS a zipfile,
        # which cannot both hold; the intent is probably
        # `not (os.path.isfile(...) and zipfile.is_zipfile(...))`. Confirm.
        if not os.path.isfile(zip_filename) and zipfile.is_zipfile(zip_filename):
            raise InstallationError(
                'Module %s (in %s) isn\'t located in a zip file in %s'
                % (module_name, filename, zip_filename))
        package_path = os.path.dirname(zip_filename)
        if not package_path in self.paths():
            logger.warn(
                'Unpacking %s into %s, but %s is not on sys.path'
                % (display_path(zip_filename), display_path(package_path),
                   display_path(package_path)))
        logger.notify('Unzipping %s (in %s)' % (module_name, display_path(zip_filename)))
        if self.simulate:
            logger.notify('Skipping remaining operations because of --simulate')
            return
        logger.indent += 2
        try:
            ## FIXME: this should be undoable:
            zip = zipfile.ZipFile(zip_filename)
            to_save = []
            for name in zip.namelist():
                if name.startswith('%s/' % module_name):
                    # Entry belongs to the target package: extract it.
                    content = zip.read(name)
                    dest = os.path.join(package_path, name)
                    if not os.path.exists(os.path.dirname(dest)):
                        os.makedirs(os.path.dirname(dest))
                    if not content and dest.endswith('/'):
                        # Empty trailing-slash entries are directories.
                        if not os.path.exists(dest):
                            os.makedirs(dest)
                    else:
                        f = open(dest, 'wb')
                        f.write(content)
                        f.close()
                else:
                    # Entry belongs to some other package: keep it for the
                    # rewritten zip.
                    to_save.append((name, zip.read(name)))
            zip.close()
            if not to_save:
                logger.info('Removing now-empty zip file %s' % display_path(zip_filename))
                os.unlink(zip_filename)
                self.remove_filename_from_pth(zip_filename)
            else:
                logger.info('Removing entries in %s/ from zip file %s' % (module_name, display_path(zip_filename)))
                # Rewrite the archive with only the retained entries.
                zip = zipfile.ZipFile(zip_filename, 'w')
                for name, content in to_save:
                    zip.writestr(name, content)
                zip.close()
        finally:
            logger.indent -= 2

    def zip_package(self, module_name, filename, no_pyc):
        """Pack the package directory at `filename` into a zip next to it
        (reusing the name for .egg dirs), then register it via a .pth file."""
        orig_filename = filename
        logger.notify('Zip %s (in %s)' % (module_name, display_path(filename)))
        logger.indent += 2
        # A .egg directory keeps its name; anything else gets '.zip' added.
        if filename.endswith('.egg'):
            dest_filename = filename
        else:
            dest_filename = filename + '.zip'
        try:
            ## FIXME: I think this needs to be undoable:
            if filename == dest_filename:
                # Source dir and destination share a name: move the
                # directory aside so the zip can take its place.
                filename = backup_dir(orig_filename)
                logger.notify('Moving %s aside to %s' % (orig_filename, filename))
                if not self.simulate:
                    shutil.move(orig_filename, filename)
            try:
                logger.info('Creating zip file in %s' % display_path(dest_filename))
                if not self.simulate:
                    zip = zipfile.ZipFile(dest_filename, 'w')
                    zip.writestr(module_name + '/', '')
                    for dirpath, dirnames, filenames in os.walk(filename):
                        if no_pyc:
                            filenames = [f for f in filenames
                                         if not f.lower().endswith('.pyc')]
                        for fns, is_dir in [(dirnames, True), (filenames, False)]:
                            for fn in fns:
                                full = os.path.join(dirpath, fn)
                                # Archive paths are rooted at module_name/.
                                dest = os.path.join(module_name, dirpath[len(filename):].lstrip(os.path.sep), fn)
                                if is_dir:
                                    zip.writestr(dest+'/', '')
                                else:
                                    zip.write(full, dest)
                    zip.close()
                logger.info('Removing old directory %s' % display_path(filename))
                if not self.simulate:
                    shutil.rmtree(filename)
            except:
                ## FIXME: need to do an undo here
                raise
            ## FIXME: should also be undone:
            self.add_filename_to_pth(dest_filename)
        finally:
            logger.indent -= 2

    def remove_filename_from_pth(self, filename):
        """Drop the line naming `filename` from the first .pth file that
        contains it; delete the .pth file entirely if that empties it."""
        for pth in self.pth_files():
            f = open(pth, 'r')
            lines = f.readlines()
            f.close()
            new_lines = [
                l for l in lines if l.strip() != filename]
            if lines != new_lines:
                logger.info('Removing reference to %s from .pth file %s'
                            % (display_path(filename), display_path(pth)))
                # filter(None, ...) drops blank lines: nothing left means
                # the whole file can go.
                if not filter(None, new_lines):
                    logger.info('%s file would be empty: deleting' % display_path(pth))
                    if not self.simulate:
                        os.unlink(pth)
                else:
                    if not self.simulate:
                        f = open(pth, 'w')
                        f.writelines(new_lines)
                        f.close()
                return
        logger.warn('Cannot find a reference to %s in any .pth file' % display_path(filename))

    def add_filename_to_pth(self, filename):
        """Append `filename` to a sibling <filename>.pth file (creating it
        if needed) so the new zip gets onto sys.path."""
        path = os.path.dirname(filename)
        # filename is absolute, so os.path.join collapses to
        # filename + '.pth' -- the .pth lives next to the zip it names.
        dest = os.path.join(path, filename + '.pth')
        if path not in self.paths():
            logger.warn('Adding .pth file %s, but it is not on sys.path' % display_path(dest))
        if not self.simulate:
            if os.path.exists(dest):
                f = open(dest)
                lines = f.readlines()
                f.close()
                # Ensure the previous last line is newline-terminated
                # before appending ours.
                if lines and not lines[-1].endswith('\n'):
                    lines[-1] += '\n'
                lines.append(filename+'\n')
            else:
                lines = [filename + '\n']
            f = open(dest, 'w')
            f.writelines(lines)
            f.close()

    def pth_files(self):
        """Yield every .pth file found in the (possibly --path-restricted)
        sys.path directories."""
        for path in self.paths():
            if not os.path.exists(path) or not os.path.isdir(path):
                continue
            for filename in os.listdir(path):
                if filename.endswith('.pth'):
                    yield os.path.join(path, filename)

    def find_package(self, package):
        """Locate `package` on the search paths, either as a real file or
        directory, or as a top-level entry inside a zipped sys.path item.
        Returns (package, full_path); raises InstallationError if absent."""
        for path in self.paths():
            full = os.path.join(path, package)
            if os.path.exists(full):
                return package, full
            # sys.path entries may themselves be zip files (zipped eggs).
            if not os.path.isdir(path) and zipfile.is_zipfile(path):
                zip = zipfile.ZipFile(path, 'r')
                try:
                    # Probe for the package's __init__ inside the archive.
                    zip.read('%s/__init__.py' % package)
                except KeyError:
                    pass
                else:
                    zip.close()
                    return package, full
                zip.close()
        ## FIXME: need special error for package.py case:
        raise InstallationError(
            'No package with the name %s found' % package)

    def list(self, options, args):
        """Report the zipped/unzipped status of packages on the search
        paths (implements --list)."""
        if args:
            raise InstallationError(
                'You cannot give an argument with --list')
        for path in sorted(self.paths()):
            if not os.path.exists(path):
                continue
            basename = os.path.basename(path.rstrip(os.path.sep))
            if os.path.isfile(path) and zipfile.is_zipfile(path):
                # A zip directly on sys.path; only report it if its parent
                # dir isn't also being listed (which would cover it).
                if os.path.dirname(path) not in self.paths():
                    logger.notify('Zipped egg: %s' % display_path(path))
                continue
            # Only scan conventional package directories.
            if (basename != 'site-packages'
                and not path.replace('\\', '/').endswith('lib/python')):
                continue
            logger.notify('In %s:' % display_path(path))
            logger.indent += 2
            zipped = []
            unzipped = []
            try:
                for filename in sorted(os.listdir(path)):
                    ext = os.path.splitext(filename)[1].lower()
                    if ext in ('.pth', '.egg-info', '.egg-link'):
                        continue
                    if ext == '.py':
                        logger.info('Not displaying %s: not a package' % display_path(filename))
                        continue
                    full = os.path.join(path, filename)
                    if os.path.isdir(full):
                        unzipped.append((filename, self.count_package(full)))
                    elif zipfile.is_zipfile(full):
                        zipped.append(filename)
                    else:
                        logger.info('Unknown file: %s' % display_path(filename))
                if zipped:
                    logger.notify('Zipped packages:')
                    logger.indent += 2
                    try:
                        for filename in zipped:
                            logger.notify(filename)
                    finally:
                        logger.indent -= 2
                else:
                    logger.notify('No zipped packages.')
                if unzipped:
                    if options.sort_files:
                        # Largest file count first.
                        unzipped.sort(key=lambda x: -x[1])
                    logger.notify('Unzipped packages:')
                    logger.indent += 2
                    try:
                        for filename, count in unzipped:
                            logger.notify('%s (%i files)' % (filename, count))
                    finally:
                        logger.indent -= 2
                else:
                    logger.notify('No unzipped packages.')
            finally:
                logger.indent -= 2

    def count_package(self, path):
        """Return the number of non-.pyc files under `path` (recursive)."""
        total = 0
        for dirpath, dirnames, filenames in os.walk(path):
            filenames = [f for f in filenames
                         if not f.lower().endswith('.pyc')]
            total += len(filenames)
        return total

# Register the command.
ZipCommand()
|
|
1182 |
|
|
1183 |
class UnzipCommand(ZipCommand):
    """pip 'unzip' command: same machinery as ZipCommand, but because
    self.name != 'zip', __init__ installs a --zip option whose default
    (unzip=True) makes unzipping the default action."""
    name = 'unzip'
    summary = 'Unzip individual packages'

# Register the command.
UnzipCommand()
|
|
1188 |
|
|
1189 |
# Template wrapped around each shell snippet below; %(shell)s and
# %(script)s are filled in by CompletionCommand.run().
BASE_COMPLETION = """
# pip %(shell)s completion start%(script)s# pip %(shell)s completion end
"""

# Shell-specific completion functions.  Both re-invoke pip with
# PIP_AUTO_COMPLETE=1 set, which makes autocomplete() (below) print the
# candidate completions and exit.
COMPLETION_SCRIPTS = {
    'bash': """
_pip_completion()
{
    COMPREPLY=( $( COMP_WORDS="${COMP_WORDS[*]}" \\
                   COMP_CWORD=$COMP_CWORD \\
                   PIP_AUTO_COMPLETE=1 $1 ) )
}
complete -o default -F _pip_completion pip
""", 'zsh': """
function _pip_completion {
  local words cword
  read -Ac words
  read -cn cword
  reply=( $( COMP_WORDS="$words[*]" \\
             COMP_CWORD=$(( cword-1 )) \\
             PIP_AUTO_COMPLETE=1 $words[1] ) )
}
compctl -K _pip_completion pip
"""
}
|
|
1214 |
|
|
1215 |
class CompletionCommand(Command):
    """pip 'completion' command: print the shell snippet (bash or zsh)
    that users source to enable tab completion.  Hidden from the normal
    command listing."""
    name = 'completion'
    summary = 'A helper command to be used for command completion'
    hidden = True

    def __init__(self):
        super(CompletionCommand, self).__init__()
        # --bash/--zsh are mutually exclusive by effect: both store into
        # dest='shell', so the last one given wins.
        self.parser.add_option(
            '--bash', '-b',
            action='store_const',
            const='bash',
            dest='shell',
            help='Emit completion code for bash')
        self.parser.add_option(
            '--zsh', '-z',
            action='store_const',
            const='zsh',
            dest='shell',
            help='Emit completion code for zsh')

    def run(self, options, args):
        """Prints the completion code of the given shell"""
        if options.shell in ('bash', 'zsh'):
            script = COMPLETION_SCRIPTS.get(options.shell, '')
            print BASE_COMPLETION % {'script': script, 'shell': options.shell}
        else:
            print 'ERROR: You must pass --bash or --zsh'

# Register the command.
CompletionCommand()
|
|
1244 |
|
|
1245 |
def autocomplete():
|
|
1246 |
"""Command and option completion for the main option parser (and options)
|
|
1247 |
and its subcommands (and options).
|
|
1248 |
|
|
1249 |
Enable by sourcing one of the completion shell scripts (bash or zsh).
|
|
1250 |
"""
|
|
1251 |
# Don't complete if user hasn't sourced bash_completion file.
|
|
1252 |
if not os.environ.has_key('PIP_AUTO_COMPLETE'):
|
|
1253 |
return
|
|
1254 |
cwords = os.environ['COMP_WORDS'].split()[1:]
|
|
1255 |
cword = int(os.environ['COMP_CWORD'])
|
|
1256 |
try:
|
|
1257 |
current = cwords[cword-1]
|
|
1258 |
except IndexError:
|
|
1259 |
current = ''
|
|
1260 |
subcommands = [cmd for cmd, cls in _commands.items() if not cls.hidden]
|
|
1261 |
options = []
|
|
1262 |
# subcommand
|
|
1263 |
if cword == 1:
|
|
1264 |
# show options of main parser only when necessary
|
|
1265 |
if current.startswith('-') or current.startswith('--'):
|
|
1266 |
subcommands += [opt.get_opt_string()
|
|
1267 |
for opt in parser.option_list
|
|
1268 |
if opt.help != optparse.SUPPRESS_HELP]
|
|
1269 |
print ' '.join(filter(lambda x: x.startswith(current), subcommands))
|
|
1270 |
# subcommand options
|
|
1271 |
# special case: the 'help' subcommand has no options
|
|
1272 |
elif cwords[0] in subcommands and cwords[0] != 'help':
|
|
1273 |
subcommand = _commands.get(cwords[0])
|
|
1274 |
options += [(opt.get_opt_string(), opt.nargs)
|
|
1275 |
for opt in subcommand.parser.option_list
|
|
1276 |
if opt.help != optparse.SUPPRESS_HELP]
|
|
1277 |
# filter out previously specified options from available options
|
|
1278 |
prev_opts = [x.split('=')[0] for x in cwords[1:cword-1]]
|
|
1279 |
options = filter(lambda (x, v): x not in prev_opts, options)
|
|
1280 |
# filter options by current input
|
|
1281 |
options = [(k, v) for k, v in options if k.startswith(current)]
|
|
1282 |
for option in options:
|
|
1283 |
opt_label = option[0]
|
|
1284 |
# append '=' to options which require args
|
|
1285 |
if option[1]:
|
|
1286 |
opt_label += '='
|
|
1287 |
print opt_label
|
|
1288 |
sys.exit(1)
|
|
1289 |
|
|
1290 |
def main(initial_args=None):
    """Top-level pip entry point.

    Parses the global options, resolves the subcommand named by the first
    positional argument against the _commands registry, and dispatches to
    its .main().  Returns whatever the command's main() returns.
    """
    if initial_args is None:
        initial_args = sys.argv[1:]
    # May print completions and exit; no-op unless PIP_AUTO_COMPLETE is set.
    autocomplete()
    options, args = parser.parse_args(initial_args)
    # Bare `pip --help` behaves like `pip help`.
    if options.help and not args:
        args = ['help']
    if not args:
        # (message fixed: was 'use "pip help" see a list of commands')
        parser.error('You must give a command (use "pip help" to see a list of commands)')
    command = args[0].lower()
    ## FIXME: search for a command match?
    if command not in _commands:
        parser.error('No command by the name %(script)s %(arg)s\n (maybe you meant "%(script)s install %(arg)s")'
                     % dict(script=os.path.basename(sys.argv[0]), arg=command))
    command = _commands[command]
    return command.main(initial_args, args[1:], options)
|
|
1306 |
|
|
1307 |
def get_proxy(proxystr=''):
    """Get the proxy given the option passed on the command line.  If an
    empty string is passed it looks at the HTTP_PROXY environment
    variable."""
    # Fall back to the environment only when no explicit value was given.
    proxystr = proxystr or os.environ.get('HTTP_PROXY', '')
    if not proxystr:
        return None
    if '@' not in proxystr:
        # No credentials embedded: use the string as-is.
        return proxystr
    user_password, server_port = proxystr.split('@', 1)
    if ':' in user_password:
        user, password = user_password.split(':', 1)
    else:
        # Username only: prompt interactively for the password and
        # URL-quote it before embedding it back into the proxy string.
        user = user_password
        import getpass
        prompt = 'Password for %s@%s: ' % (user, server_port)
        password = urllib.quote(getpass.getpass(prompt))
    return '%s:%s@%s' % (user, password, server_port)
|
|
1328 |
|
|
1329 |
def setup_proxy_handler(proxystr=''):
    """Set the proxy handler given the option passed on the command
    line.  If an empty string is passed it looks at the HTTP_PROXY
    environment variable. """
    proxy = get_proxy(proxystr)
    if proxy:
        # Only 'http' and 'ftp' schemes are mapped to the proxy here;
        # NOTE(review): 'https' is not included -- confirm whether HTTPS
        # traffic is intentionally unproxied.
        proxy_support = urllib2.ProxyHandler({"http": proxy, "ftp": proxy})
        opener = urllib2.build_opener(proxy_support, urllib2.CacheFTPHandler)
        # Installs globally: all subsequent urllib2.urlopen calls in this
        # process use the proxy.
        urllib2.install_opener(opener)
|
|
1338 |
|
|
1339 |
def format_exc(exc_info=None):
    """Render an exception (default: the one currently being handled) as
    a traceback string, like traceback.format_exc but for an arbitrary
    exc_info triple."""
    if exc_info is None:
        exc_info = sys.exc_info()
    buf = StringIO()
    # Explicit positional args + keyword avoids the Python-2 restriction
    # on keywords after *args (the original used **dict(file=...)).
    traceback.print_exception(exc_info[0], exc_info[1], exc_info[2], file=buf)
    return buf.getvalue()
|
|
1345 |
|
|
1346 |
def restart_in_venv(venv, base, site_packages, args):
    """
    Restart this script using the interpreter in the given virtual environment

    Creates the virtualenv first if it does not exist (requires the
    virtualenv package), then re-executes this script with the venv's
    Python and exits with the child's return code.  Never returns.
    """
    if base and not os.path.isabs(venv) and not venv.startswith('~'):
        base = os.path.expanduser(base)
        # ensure we have an abs basepath at this point:
        #    a relative one makes no sense (or does it?)
        if os.path.isabs(base):
            venv = os.path.join(base, venv)

    if venv.startswith('~'):
        venv = os.path.expanduser(venv)

    if not os.path.exists(venv):
        try:
            import virtualenv
        except ImportError:
            print 'The virtual environment does not exist: %s' % venv
            print 'and virtualenv is not installed, so a new environment cannot be created'
            sys.exit(3)
        print 'Creating new virtualenv environment in %s' % venv
        virtualenv.logger = logger
        # NOTE(review): indent is bumped here but never restored -- the
        # process exec's below, so it may be deliberate; confirm.
        logger.indent += 2
        virtualenv.create_environment(venv, site_packages=site_packages)
    if sys.platform == 'win32':
        python = os.path.join(venv, 'Scripts', 'python.exe')
        # check for bin directory which is used in buildouts
        if not os.path.exists(python):
            python = os.path.join(venv, 'bin', 'python.exe')
    else:
        python = os.path.join(venv, 'bin', 'python')
    if not os.path.exists(python):
        # Last resort: maybe the caller passed the interpreter itself.
        python = venv
    if not os.path.exists(python):
        raise BadCommand('Cannot find virtual environment interpreter at %s' % python)
    base = os.path.dirname(os.path.dirname(python))
    # Re-run the .py source, not the .pyc, so __file__ tricks keep working.
    # (`file` shadows the builtin; left as-is to keep this edit doc-only.)
    file = __file__
    if file.endswith('.pyc'):
        file = file[:-1]
    # The trailing sentinel args let the re-run detect it was restarted.
    proc = subprocess.Popen(
        [python, file] + args + [base, '___VENV_RESTART___'])
    proc.wait()
    sys.exit(proc.returncode)
|
|
1390 |
|
|
1391 |
class PackageFinder(object):
|
|
1392 |
"""This finds packages.
|
|
1393 |
|
|
1394 |
This is meant to match easy_install's technique for looking for
|
|
1395 |
packages, by reading pages and looking for appropriate links
|
|
1396 |
"""
|
|
1397 |
|
|
1398 |
failure_limit = 3
|
|
1399 |
|
|
1400 |
def __init__(self, find_links, index_urls):
    # --find-links locations, searched in addition to the indexes.
    self.find_links = find_links
    # Base URLs of the package indexes (PyPI-style simple pages).
    self.index_urls = index_urls
    # Filled via add_dependency_links(); consulted for every requirement.
    self.dependency_links = []
    # Shared page cache used by _get_page (PageCache defined elsewhere).
    self.cache = PageCache()
    # These are boring links that have already been logged somehow:
    self.logged_links = set()
|
|
1407 |
|
|
1408 |
def add_dependency_links(self, links):
    """Record extra URLs (a distribution's dependency_links) to search
    when resolving any requirement."""
    ## FIXME: this shouldn't be global list this, it should only
    ## apply to requirements of the package that specifies the
    ## dependency_links value
    ## FIXME: also, we should track comes_from (i.e., use Link)
    self.dependency_links.extend(links)
|
|
1414 |
|
|
1415 |
def find_requirement(self, req, upgrade):
    """Find the best download link for `req`.

    Gathers candidate (parsed_version, link, version) triples from the
    index pages, --find-links, dependency_links and local file:// paths,
    filters them against the requirement's version specs, and returns the
    Link of the newest applicable candidate -- or None if the already
    installed version suffices.  Raises DistributionNotFound when nothing
    matches.
    """
    url_name = req.url_name
    # Only check main index if index URL is given:
    main_index_url = None
    if self.index_urls:
        # Check that we have the url_name correctly spelled:
        main_index_url = Link(posixpath.join(self.index_urls[0], url_name))
        # This will also cache the page, so it's okay that we get it again later:
        page = self._get_page(main_index_url, req)
        if page is None:
            # Page missing -- maybe the project name's case is wrong;
            # ask the index for the canonical spelling.
            url_name = self._find_url_name(Link(self.index_urls[0]), url_name, req) or req.url_name
    def mkurl_pypi_url(url):
        loc = posixpath.join(url, url_name)
        # For maximum compatibility with easy_install, ensure the path
        # ends in a trailing slash.  Although this isn't in the spec
        # (and PyPI can handle it without the slash) some other index
        # implementations might break if they relied on easy_install's behavior.
        if not loc.endswith('/'):
            loc = loc + '/'
        return loc
    if url_name is not None:
        locations = [
            mkurl_pypi_url(url)
            for url in self.index_urls] + self.find_links
    else:
        locations = list(self.find_links)
    locations.extend(self.dependency_links)
    # Pinned versions (e.g. ==1.2) get their index subpage searched first.
    for version in req.absolute_versions:
        if url_name is not None and main_index_url is not None:
            locations = [
                posixpath.join(main_index_url.url, version)] + locations
    # Split file:// locations (scanned directly on disk) from remote URLs.
    file_locations = []
    url_locations = []
    for url in locations:
        if url.startswith('file:'):
            fn = url_to_filename(url)
            if os.path.isdir(fn):
                path = os.path.realpath(fn)
                for item in os.listdir(path):
                    file_locations.append(
                        filename_to_url2(os.path.join(path, item)))
            elif os.path.isfile(fn):
                file_locations.append(filename_to_url2(fn))
        else:
            url_locations.append(url)

    locations = [Link(url) for url in url_locations]
    logger.debug('URLs to search for versions for %s:' % req)
    for location in locations:
        logger.debug('* %s' % location)
    found_versions = []
    found_versions.extend(
        self._package_versions(
            [Link(url, '-f') for url in self.find_links], req.name.lower()))
    page_versions = []
    for page in self._get_pages(locations, req):
        logger.debug('Analyzing links from page %s' % page.url)
        logger.indent += 2
        try:
            page_versions.extend(self._package_versions(page.links, req.name.lower()))
        finally:
            logger.indent -= 2
    dependency_versions = list(self._package_versions(
        [Link(url) for url in self.dependency_links], req.name.lower()))
    if dependency_versions:
        logger.info('dependency_links found: %s' % ', '.join([link.url for parsed, link, version in dependency_versions]))
    file_versions = list(self._package_versions(
        [Link(url) for url in file_locations], req.name.lower()))
    if not found_versions and not page_versions and not dependency_versions and not file_versions:
        logger.fatal('Could not find any downloads that satisfy the requirement %s' % req)
        raise DistributionNotFound('No distributions at all found for %s' % req)
    if req.satisfied_by is not None:
        # Represent the installed distribution as a candidate whose
        # "link" is the Inf sentinel.
        found_versions.append((req.satisfied_by.parsed_version, Inf, req.satisfied_by.version))
    if file_versions:
        file_versions.sort(reverse=True)
        logger.info('Local files found: %s' % ', '.join([url_to_filename(link.url) for parsed, link, version in file_versions]))
        found_versions = file_versions + found_versions
    all_versions = found_versions + page_versions + dependency_versions
    applicable_versions = []
    for (parsed_version, link, version) in all_versions:
        if version not in req.req:
            logger.info("Ignoring link %s, version %s doesn't match %s"
                        % (link, version, ','.join([''.join(s) for s in req.req.specs])))
            continue
        applicable_versions.append((link, version))
    # Sort newest-first by parsed version (py2 cmp=, reversed comparison).
    applicable_versions = sorted(applicable_versions, key=operator.itemgetter(1),
        cmp=lambda x, y : cmp(pkg_resources.parse_version(y), pkg_resources.parse_version(x))
    )
    existing_applicable = bool([link for link, version in applicable_versions if link is Inf])
    if not upgrade and existing_applicable:
        # NOTE(review): entries here are (link, version), so [0][1] is the
        # version string and can never be the Inf sentinel -- this branch
        # looks like it should test [0][0] (as done below).  Confirm.
        if applicable_versions[0][1] is Inf:
            logger.info('Existing installed version (%s) is most up-to-date and satisfies requirement'
                        % req.satisfied_by.version)
        else:
            logger.info('Existing installed version (%s) satisfies requirement (most up-to-date version is %s)'
                        % (req.satisfied_by.version, applicable_versions[0][1]))
        return None
    if not applicable_versions:
        logger.fatal('Could not find a version that satisfies the requirement %s (from versions: %s)'
                     % (req, ', '.join([version for parsed_version, link, version in found_versions])))
        raise DistributionNotFound('No distributions matching the version for %s' % req)
    if applicable_versions[0][0] is Inf:
        # We have an existing version, and its the best version
        logger.info('Installed version (%s) is most up-to-date (past versions: %s)'
                    % (req.satisfied_by.version, ', '.join([version for link, version in applicable_versions[1:]]) or 'none'))
        return None
    if len(applicable_versions) > 1:
        logger.info('Using version %s (newest of versions: %s)' %
                    (applicable_versions[0][1], ', '.join([version for link, version in applicable_versions])))
    return applicable_versions[0][0]
|
|
1525 |
|
|
1526 |
def _find_url_name(self, index_url, url_name, req):
    """Finds the true URL name of a package, when the given name isn't quite correct.
    This is usually used to implement case-insensitivity."""
    if not index_url.url.endswith('/'):
        # Vaguely part of the PyPI API... weird but true.
        ## FIXME: bad to modify this?
        index_url.url += '/'
    page = self._get_page(index_url, req)
    if page is None:
        logger.fatal('Cannot fetch index base URL %s' % index_url)
        return
    norm_name = normalize_name(req.url_name)
    # Scan every project link on the index root page for a
    # normalized-name match.
    for link in page.links:
        base = posixpath.basename(link.path.rstrip('/'))
        if norm_name == normalize_name(base):
            logger.notify('Real name of requirement %s is %s' % (url_name, base))
            return base
    return None
|
|
1544 |
|
|
1545 |
def _get_pages(self, locations, req):
    """Yields (page, page_url) from the given locations, skipping
    locations that have errors, and adding download/homepage links"""
    pending_queue = Queue()
    for location in locations:
        pending_queue.put(location)
    # `done` and `seen` are shared (unlocked) across the worker threads;
    # list.append/set.add are single bytecode ops under CPython's GIL,
    # which this code appears to rely on -- confirm if porting.
    done = []
    seen = set()
    threads = []
    # At most 10 fetcher threads, fewer if there are fewer locations.
    for i in range(min(10, len(locations))):
        t = threading.Thread(target=self._get_queued_page, args=(req, pending_queue, done, seen))
        t.setDaemon(True)
        threads.append(t)
        t.start()
    for t in threads:
        t.join()
    return done

# Serializes log output across the page-fetching threads.
_log_lock = threading.Lock()
|
|
1564 |
|
|
1565 |
def _get_queued_page(self, req, pending_queue, done, seen):
    """Worker loop for _get_pages: drain the queue, fetch each unseen
    location, collect successful pages into `done`, and enqueue each
    page's rel links for further crawling."""
    while 1:
        try:
            # Non-blocking get: an empty queue means this worker is done.
            location = pending_queue.get(False)
        except QueueEmpty:
            return
        if location in seen:
            continue
        seen.add(location)
        page = self._get_page(location, req)
        if page is None:
            continue
        done.append(page)
        for link in page.rel_links():
            pending_queue.put(link)

# '#egg=<name>' fragment in a URL names the project explicitly.
_egg_fragment_re = re.compile(r'#egg=([^&]*)')
# Splits '<name>-<version>' out of an archive basename (case-insensitive).
_egg_info_re = re.compile(r'([a-z0-9_.]+)-([a-z0-9_.-]+)', re.I)
# Trailing '-pyX.Y' tag marking a Python-version-specific build.
_py_version_re = re.compile(r'-py([123]\.[0-9])$')
|
|
1584 |
|
|
1585 |
def _sort_links(self, links):
|
|
1586 |
"Brings links in order, non-egg links first, egg links second"
|
|
1587 |
eggs, no_eggs = [], []
|
|
1588 |
for link in links:
|
|
1589 |
if link.egg_fragment:
|
|
1590 |
eggs.append(link)
|
|
1591 |
else:
|
|
1592 |
no_eggs.append(link)
|
|
1593 |
return no_eggs + eggs
|
|
1594 |
|
|
1595 |
    def _package_versions(self, links, search_name):
        """Yield (parsed_version, link, version_string) for every link that
        looks like a downloadable distribution of ``search_name``.

        Links that are not files, have an unknown archive extension, name a
        different project, or are built for a different Python version are
        skipped (each skip is logged, most of them only once per link).
        """
        seen_links = {}
        for link in self._sort_links(links):
            # De-duplicate by URL.
            if link.url in seen_links:
                continue
            seen_links[link.url] = None
            if link.egg_fragment:
                # An explicit '#egg=' fragment names the distribution directly.
                egg_info = link.egg_fragment
            else:
                path = link.path
                egg_info, ext = link.splitext()
                if not ext:
                    if link not in self.logged_links:
                        logger.debug('Skipping link %s; not a file' % link)
                        self.logged_links.add(link)
                    continue
                if egg_info.endswith('.tar'):
                    # Special double-extension case:
                    egg_info = egg_info[:-4]
                    ext = '.tar' + ext
                if ext not in ('.tar.gz', '.tar.bz2', '.tar', '.tgz', '.zip'):
                    if link not in self.logged_links:
                        logger.debug('Skipping link %s; unknown archive format: %s' % (link, ext))
                        self.logged_links.add(link)
                    continue
            version = self._egg_info_matches(egg_info, search_name, link)
            if version is None:
                logger.debug('Skipping link %s; wrong project name (not %s)' % (link, search_name))
                continue
            # A trailing -pyX.Y marker restricts the build to one Python version.
            match = self._py_version_re.search(version)
            if match:
                version = version[:match.start()]
                py_version = match.group(1)
                if py_version != sys.version[:3]:
                    logger.debug('Skipping %s because Python version is incorrect' % link)
                    continue
            logger.debug('Found link %s, version: %s' % (link, version))
            yield (pkg_resources.parse_version(version),
                   link,
                   version)
|
|
1635 |
|
|
1636 |
    def _egg_info_matches(self, egg_info, search_name, link):
        """Return the version part of ``egg_info`` ('name-version') when its
        name matches ``search_name``; otherwise return None."""
        match = self._egg_info_re.search(egg_info)
        if not match:
            logger.debug('Could not parse version from link: %s' % link)
            return None
        name = match.group(0).lower()
        # To match the "safe" name that pkg_resources creates:
        name = name.replace('_', '-')
        if name.startswith(search_name.lower()):
            # NOTE(review): this is a prefix match, so searching for 'foo'
            # also accepts 'foo-bar-1.0' and would return 'bar-1.0' as the
            # version -- confirm this looseness is intended.
            return match.group(0)[len(search_name):].lstrip('-')
        else:
            return None
|
|
1648 |
|
|
1649 |
    def _get_page(self, link, req):
        """Fetch ``link`` as an HTMLPage (None on failure), going through
        this finder's page cache."""
        return HTMLPage.get_page(link, req, cache=self.cache)
|
|
1651 |
|
|
1652 |
|
|
1653 |
class InstallRequirement(object):
|
|
1654 |
|
|
1655 |
    def __init__(self, req, comes_from, source_dir=None, editable=False,
                 url=None, update=True):
        """Create a requirement to install.

        req -- pkg_resources.Requirement or a requirement string; may be
               None for URL-only requirements whose name is not yet known
        comes_from -- parent requirement (or a string) describing the origin
        source_dir -- directory holding an unpacked source tree, if any
        editable -- True for '-e' style (develop-mode) requirements
        url -- download/checkout URL, if known
        update -- whether an editable checkout should be updated in place
        """
        if isinstance(req, basestring):
            req = pkg_resources.Requirement.parse(req)
        self.req = req
        self.comes_from = comes_from
        self.source_dir = source_dir
        self.editable = editable
        self.url = url
        # Cached by egg_info_path() once the .egg-info directory is located.
        self._egg_info_path = None
        # This holds the pkg_resources.Distribution object if this requirement
        # is already available:
        self.satisfied_by = None
        # This hold the pkg_resources.Distribution object if this requirement
        # conflicts with another installed distribution:
        self.conflicts_with = None
        # Temporary build directory, created lazily by build_location().
        self._temp_build_dir = None
        # Cached result of the is_bundle property.
        self._is_bundle = None
        # True if the editable should be updated:
        self.update = update
        # Set to True after successful installation
        self.install_succeeded = None
        # UninstallPathSet of uninstalled distribution (for possible rollback)
        self.uninstalled = None
|
|
1679 |
|
|
1680 |
@classmethod
|
|
1681 |
def from_editable(cls, editable_req, comes_from=None, default_vcs=None):
|
|
1682 |
name, url = parse_editable(editable_req, default_vcs)
|
|
1683 |
if url.startswith('file:'):
|
|
1684 |
source_dir = url_to_filename(url)
|
|
1685 |
else:
|
|
1686 |
source_dir = None
|
|
1687 |
return cls(name, comes_from, source_dir=source_dir, editable=True, url=url)
|
|
1688 |
|
|
1689 |
    @classmethod
    def from_line(cls, name, comes_from=None):
        """Creates an InstallRequirement from a name, which might be a
        requirement, filename, or URL.
        """
        url = None
        name = name.strip()
        req = name
        if is_url(name):
            url = name
            ## FIXME: I think getting the requirement here is a bad idea:
            #req = get_requirement_from_url(url)
            # Project name is unknown until the URL is actually downloaded.
            req = None
        elif is_filename(name):
            if not os.path.exists(name):
                # Warn but keep going; downstream code will fail if the
                # file really is missing.
                logger.warn('Requirement %r looks like a filename, but the file does not exist'
                            % name)
            url = filename_to_url(name)
            #req = get_requirement_from_url(url)
            req = None
        return cls(req, comes_from, url=url)
|
|
1710 |
|
|
1711 |
    def __str__(self):
        """Human-readable description: requirement (or bare URL), install
        location when already satisfied, and provenance."""
        if self.req:
            s = str(self.req)
            if self.url:
                s += ' from %s' % self.url
        else:
            # Unnamed requirement: the URL is all we know.
            s = self.url
        if self.satisfied_by is not None:
            s += ' in %s' % display_path(self.satisfied_by.location)
        if self.comes_from:
            if isinstance(self.comes_from, basestring):
                comes_from = self.comes_from
            else:
                # Parent is another InstallRequirement; summarize its chain.
                comes_from = self.comes_from.from_path()
            if comes_from:
                s += ' (from %s)' % comes_from
        return s
|
|
1728 |
|
|
1729 |
def from_path(self):
|
|
1730 |
if self.req is None:
|
|
1731 |
return None
|
|
1732 |
s = str(self.req)
|
|
1733 |
if self.comes_from:
|
|
1734 |
if isinstance(self.comes_from, basestring):
|
|
1735 |
comes_from = self.comes_from
|
|
1736 |
else:
|
|
1737 |
comes_from = self.comes_from.from_path()
|
|
1738 |
if comes_from:
|
|
1739 |
s += '->' + comes_from
|
|
1740 |
return s
|
|
1741 |
|
|
1742 |
    def build_location(self, build_dir, unpack=True):
        """Return the directory this requirement should be built in.

        Requirements without a known name get a temporary directory; once
        egg_info has discovered the name, correct_build_location() moves it
        under the ideal build_dir recorded here.  (``unpack`` is currently
        unused in this method.)
        """
        if self._temp_build_dir is not None:
            return self._temp_build_dir
        if self.req is None:
            self._temp_build_dir = tempfile.mkdtemp('-build', 'pip-')
            # Remember where the build *should* live for the later move.
            self._ideal_build_dir = build_dir
            return self._temp_build_dir
        if self.editable:
            name = self.name.lower()
        else:
            name = self.name
        # FIXME: Is there a better place to create the build_dir? (hg and bzr need this)
        if not os.path.exists(build_dir):
            os.makedirs(build_dir)
        return os.path.join(build_dir, name)
|
|
1757 |
|
|
1758 |
    def correct_build_location(self):
        """If the build location was a temporary directory, this will move it
        to a new more permanent location"""
        if self.source_dir is not None:
            return
        # Only meaningful once run_egg_info() has discovered the name.
        assert self.req is not None
        assert self._temp_build_dir
        old_location = self._temp_build_dir
        new_build_dir = self._ideal_build_dir
        del self._ideal_build_dir
        if self.editable:
            name = self.name.lower()
        else:
            name = self.name
        new_location = os.path.join(new_build_dir, name)
        if not os.path.exists(new_build_dir):
            logger.debug('Creating directory %s' % new_build_dir)
            os.makedirs(new_build_dir)
        if os.path.exists(new_location):
            raise InstallationError(
                'A package already exists in %s; please remove it to continue'
                % display_path(new_location))
        logger.debug('Moving package %s from %s to new location %s'
                     % (self, display_path(old_location), display_path(new_location)))
        shutil.move(old_location, new_location)
        self._temp_build_dir = new_location
        self.source_dir = new_location
        # Invalidate the cached egg-info path; it pointed into old_location.
        self._egg_info_path = None
|
|
1786 |
|
|
1787 |
@property
|
|
1788 |
def name(self):
|
|
1789 |
if self.req is None:
|
|
1790 |
return None
|
|
1791 |
return self.req.project_name
|
|
1792 |
|
|
1793 |
@property
|
|
1794 |
def url_name(self):
|
|
1795 |
if self.req is None:
|
|
1796 |
return None
|
|
1797 |
return urllib.quote(self.req.unsafe_name)
|
|
1798 |
|
|
1799 |
    @property
    def setup_py(self):
        """Path of the setup.py inside this requirement's source tree."""
        return os.path.join(self.source_dir, 'setup.py')
|
|
1802 |
|
|
1803 |
    def run_egg_info(self, force_root_egg_info=False):
        """Run ``setup.py egg_info`` in the source tree to generate metadata.

        Unless the requirement is editable (or ``force_root_egg_info`` is
        set), the .egg-info directory is written under 'pip-egg-info' rather
        than the source root.  If the requirement had no name yet, it is
        filled in from the generated PKG-INFO and the build directory is
        moved to its proper location.
        """
        assert self.source_dir
        if self.name:
            logger.notify('Running setup.py egg_info for package %s' % self.name)
        else:
            logger.notify('Running setup.py egg_info for package from %s' % self.url)
        logger.indent += 2
        try:
            # _run_setup_py is a template; substitute the real paths/names.
            script = self._run_setup_py
            script = script.replace('__SETUP_PY__', repr(self.setup_py))
            script = script.replace('__PKG_NAME__', repr(self.name))
            # We can't put the .egg-info files at the root, because then the source code will be mistaken
            # for an installed egg, causing problems
            if self.editable or force_root_egg_info:
                egg_base_option = []
            else:
                egg_info_dir = os.path.join(self.source_dir, 'pip-egg-info')
                if not os.path.exists(egg_info_dir):
                    os.makedirs(egg_info_dir)
                egg_base_option = ['--egg-base', 'pip-egg-info']
            call_subprocess(
                [sys.executable, '-c', script, 'egg_info'] + egg_base_option,
                cwd=self.source_dir, filter_stdout=self._filter_install, show_stdout=False,
                command_level=Logger.VERBOSE_DEBUG,
                command_desc='python setup.py egg_info')
        finally:
            logger.indent -= 2
        if not self.req:
            # Name was unknown (URL-only requirement): read it from PKG-INFO.
            self.req = pkg_resources.Requirement.parse(self.pkg_info()['Name'])
            self.correct_build_location()
|
|
1833 |
|
|
1834 |
    ## FIXME: This is a lame hack, entirely for PasteScript which has
    ## a self-provided entry point that causes this awkwardness
    # Template script executed by run_egg_info(); __SETUP_PY__ and
    # __PKG_NAME__ are textually replaced before it is run.  It patches the
    # setuptools egg_info command so metadata writers are loaded without
    # requiring their dependencies.
    _run_setup_py = """
__file__ = __SETUP_PY__
from setuptools.command import egg_info
def replacement_run(self):
    self.mkpath(self.egg_info)
    installer = self.distribution.fetch_build_egg
    for ep in egg_info.iter_entry_points('egg_info.writers'):
        # require=False is the change we're making:
        writer = ep.load(require=False)
        if writer:
            writer(self, ep.name, egg_info.os.path.join(self.egg_info,ep.name))
    self.find_sources()
egg_info.egg_info.run = replacement_run
execfile(__file__)
"""
|
|
1851 |
|
|
1852 |
def egg_info_data(self, filename):
|
|
1853 |
if self.satisfied_by is not None:
|
|
1854 |
if not self.satisfied_by.has_metadata(filename):
|
|
1855 |
return None
|
|
1856 |
return self.satisfied_by.get_metadata(filename)
|
|
1857 |
assert self.source_dir
|
|
1858 |
filename = self.egg_info_path(filename)
|
|
1859 |
if not os.path.exists(filename):
|
|
1860 |
return None
|
|
1861 |
fp = open(filename, 'r')
|
|
1862 |
data = fp.read()
|
|
1863 |
fp.close()
|
|
1864 |
return data
|
|
1865 |
|
|
1866 |
    def egg_info_path(self, filename):
        """Return the path of ``filename`` inside the .egg-info directory,
        locating (and caching) that directory on first use."""
        if self._egg_info_path is None:
            if self.editable:
                base = self.source_dir
            else:
                base = os.path.join(self.source_dir, 'pip-egg-info')
            filenames = os.listdir(base)
            if self.editable:
                # Editable installs may keep the .egg-info anywhere in the
                # tree: walk it, pruning VCS metadata directories.
                filenames = []
                for root, dirs, files in os.walk(base):
                    for dir in vcs.dirnames:
                        if dir in dirs:
                            dirs.remove(dir)
                    filenames.extend([os.path.join(root, dir)
                                      for dir in dirs])
                filenames = [f for f in filenames if f.endswith('.egg-info')]
            # Exactly one candidate is expected; anything else is a setup bug.
            assert filenames, "No files/directories in %s (from %s)" % (base, filename)
            assert len(filenames) == 1, "Unexpected files/directories in %s: %s" % (base, ' '.join(filenames))
            self._egg_info_path = os.path.join(base, filenames[0])
        return os.path.join(self._egg_info_path, filename)
|
|
1886 |
|
|
1887 |
def egg_info_lines(self, filename):
|
|
1888 |
data = self.egg_info_data(filename)
|
|
1889 |
if not data:
|
|
1890 |
return []
|
|
1891 |
result = []
|
|
1892 |
for line in data.splitlines():
|
|
1893 |
line = line.strip()
|
|
1894 |
if not line or line.startswith('#'):
|
|
1895 |
continue
|
|
1896 |
result.append(line)
|
|
1897 |
return result
|
|
1898 |
|
|
1899 |
    def pkg_info(self):
        """Parse PKG-INFO metadata and return it as an email Message-like
        object (empty when no PKG-INFO exists, after logging a warning)."""
        p = FeedParser()
        data = self.egg_info_data('PKG-INFO')
        if not data:
            logger.warn('No PKG-INFO file found in %s' % display_path(self.egg_info_path('PKG-INFO')))
        # Feeding '' still produces a valid (empty) message object.
        p.feed(data or '')
        return p.close()
|
|
1906 |
|
|
1907 |
    @property
    def dependency_links(self):
        """Extra download/index URLs declared in dependency_links.txt."""
        return self.egg_info_lines('dependency_links.txt')
|
|
1910 |
|
|
1911 |
_requirements_section_re = re.compile(r'\[(.*?)\]')
|
|
1912 |
|
|
1913 |
    def requirements(self, extras=()):
        """Yield this distribution's requirement strings from requires.txt,
        including extras sections only for the requested ``extras``."""
        in_extra = None
        for line in self.egg_info_lines('requires.txt'):
            match = self._requirements_section_re.match(line)
            if match:
                # A '[name]' line starts an extras section.
                in_extra = match.group(1)
                continue
            if in_extra and in_extra not in extras:
                # Skip requirement for an extra we aren't requiring
                continue
            yield line
|
|
1924 |
|
|
1925 |
@property
|
|
1926 |
def absolute_versions(self):
|
|
1927 |
for qualifier, version in self.req.specs:
|
|
1928 |
if qualifier == '==':
|
|
1929 |
yield version
|
|
1930 |
|
|
1931 |
    @property
    def installed_version(self):
        """Version string reported by the source tree's PKG-INFO metadata."""
        return self.pkg_info()['version']
|
|
1934 |
|
|
1935 |
    def assert_source_matches_version(self):
        """Verify the unpacked source's version satisfies this requirement,
        raising InstallationError when it does not."""
        assert self.source_dir
        if self.comes_from is None:
            # We don't check the versions of things explicitly installed.
            # This makes, e.g., "pip Package==dev" possible
            return
        version = self.installed_version
        if version not in self.req:
            logger.fatal(
                'Source in %s has the version %s, which does not match the requirement %s'
                % (display_path(self.source_dir), version, self))
            raise InstallationError(
                'Source in %s has version %s that conflicts with %s'
                % (display_path(self.source_dir), version, self))
        else:
            logger.debug('Source in %s has version %s, which satisfies requirement %s'
                         % (display_path(self.source_dir), version, self))
|
|
1952 |
|
|
1953 |
    def update_editable(self, obtain=True):
        """Check out (obtain) or export the editable source tree from its
        version-control URL, when updating is enabled."""
        if not self.url:
            logger.info("Cannot update repository at %s; repository location is unknown" % self.source_dir)
            return
        assert self.editable
        assert self.source_dir
        if self.url.startswith('file:'):
            # Static paths don't get updated
            return
        # Editable URLs have the form 'vcs+real_url', e.g. 'svn+http://...'.
        assert '+' in self.url, "bad url: %r" % self.url
        if not self.update:
            return
        vc_type, url = self.url.split('+', 1)
        backend = vcs.get_backend(vc_type)
        if backend:
            vcs_backend = backend(self.url)
            if obtain:
                vcs_backend.obtain(self.source_dir)
            else:
                vcs_backend.export(self.source_dir)
        else:
            assert 0, (
                'Unexpected version control type (in %s): %s'
                % (self.url, vc_type))
|
|
1977 |
|
|
1978 |
    def uninstall(self, auto_confirm=False):
        """
        Uninstall the distribution currently satisfying this requirement.

        Prompts before removing or modifying files unless
        ``auto_confirm`` is True.

        Refuses to delete or modify files outside of ``sys.prefix`` -
        thus uninstallation within a virtual environment can only
        modify that virtual environment, even if the virtualenv is
        linked to global site-packages.

        """
        if not self.check_if_exists():
            raise UninstallationError("Cannot uninstall requirement %s, not installed" % (self.name,))
        dist = self.satisfied_by or self.conflicts_with
        paths_to_remove = UninstallPathSet(dist, sys.prefix)

        pip_egg_info_path = os.path.join(dist.location,
                                         dist.egg_name()) + '.egg-info'
        easy_install_egg = dist.egg_name() + '.egg'
        # This won't find a globally-installed develop egg if
        # we're in a virtualenv.
        # (There doesn't seem to be any metadata in the
        # Distribution object for a develop egg that points back
        # to its .egg-link and easy-install.pth files).  That's
        # OK, because we restrict ourselves to making changes
        # within sys.prefix anyway.
        develop_egg_link = os.path.join(site_packages,
                                        dist.project_name) + '.egg-link'
        if os.path.exists(pip_egg_info_path):
            # package installed by pip
            paths_to_remove.add(pip_egg_info_path)
            if dist.has_metadata('installed-files.txt'):
                # installed-files.txt entries are relative to the egg-info dir.
                for installed_file in dist.get_metadata('installed-files.txt').splitlines():
                    path = os.path.normpath(os.path.join(pip_egg_info_path, installed_file))
                    if os.path.exists(path):
                        paths_to_remove.add(path)
            if dist.has_metadata('top_level.txt'):
                for top_level_pkg in [p for p
                                      in dist.get_metadata('top_level.txt').splitlines()
                                      if p]:
                    path = os.path.join(dist.location, top_level_pkg)
                    if os.path.exists(path):
                        paths_to_remove.add(path)
                    elif os.path.exists(path + '.py'):
                        # Single-module distribution: remove the module and
                        # its compiled bytecode.
                        paths_to_remove.add(path + '.py')
                        if os.path.exists(path + '.pyc'):
                            paths_to_remove.add(path + '.pyc')

        elif dist.location.endswith(easy_install_egg):
            # package installed by easy_install
            paths_to_remove.add(dist.location)
            easy_install_pth = os.path.join(os.path.dirname(dist.location),
                                            'easy-install.pth')
            paths_to_remove.add_pth(easy_install_pth, './' + easy_install_egg)

        elif os.path.isfile(develop_egg_link):
            # develop egg
            fh = open(develop_egg_link, 'r')
            link_pointer = os.path.normcase(fh.readline().strip())
            fh.close()
            assert (link_pointer == dist.location), 'Egg-link %s does not match installed location of %s (at %s)' % (link_pointer, self.name, dist.location)
            paths_to_remove.add(develop_egg_link)
            easy_install_pth = os.path.join(os.path.dirname(develop_egg_link),
                                            'easy-install.pth')
            paths_to_remove.add_pth(easy_install_pth, dist.location)
            # fix location (so we can uninstall links to sources outside venv)
            paths_to_remove.location = develop_egg_link

        # find distutils scripts= scripts
        if dist.has_metadata('scripts') and dist.metadata_isdir('scripts'):
            for script in dist.metadata_listdir('scripts'):
                paths_to_remove.add(os.path.join(bin_py, script))
                if sys.platform == 'win32':
                    paths_to_remove.add(os.path.join(bin_py, script) + '.bat')

        # find console_scripts
        if dist.has_metadata('entry_points.txt'):
            config = ConfigParser.SafeConfigParser()
            config.readfp(FakeFile(dist.get_metadata_lines('entry_points.txt')))
            if config.has_section('console_scripts'):
                for name, value in config.items('console_scripts'):
                    paths_to_remove.add(os.path.join(bin_py, name))
                    if sys.platform == 'win32':
                        paths_to_remove.add(os.path.join(bin_py, name) + '.exe')
                        paths_to_remove.add(os.path.join(bin_py, name) + '-script.py')

        paths_to_remove.remove(auto_confirm)
        # Kept so rollback_uninstall() can restore the removed files.
        self.uninstalled = paths_to_remove
|
|
2068 |
|
|
2069 |
def rollback_uninstall(self):
|
|
2070 |
if self.uninstalled:
|
|
2071 |
self.uninstalled.rollback()
|
|
2072 |
else:
|
|
2073 |
logger.error("Can't rollback %s, nothing uninstalled."
|
|
2074 |
% (self.project_name,))
|
|
2075 |
|
|
2076 |
    def archive(self, build_dir):
        """Create a <name>-<version>.zip of the source tree in ``build_dir``,
        asking what to do when the archive already exists (ignore, wipe, or
        back up the old file)."""
        assert self.source_dir
        create_archive = True
        archive_name = '%s-%s.zip' % (self.name, self.installed_version)
        archive_path = os.path.join(build_dir, archive_name)
        if os.path.exists(archive_path):
            response = ask('The file %s exists. (i)gnore, (w)ipe, (b)ackup '
                           % display_path(archive_path), ('i', 'w', 'b'))
            if response == 'i':
                create_archive = False
            elif response == 'w':
                logger.warn('Deleting %s' % display_path(archive_path))
                os.remove(archive_path)
            elif response == 'b':
                dest_file = backup_dir(archive_path)
                logger.warn('Backing up %s to %s'
                            % (display_path(archive_path), display_path(dest_file)))
                shutil.move(archive_path, dest_file)
        if create_archive:
            zip = zipfile.ZipFile(archive_path, 'w', zipfile.ZIP_DEFLATED)
            dir = os.path.normcase(os.path.abspath(self.source_dir))
            for dirpath, dirnames, filenames in os.walk(dir):
                if 'pip-egg-info' in dirnames:
                    # Don't ship pip's own metadata directory.
                    dirnames.remove('pip-egg-info')
                for dirname in dirnames:
                    dirname = os.path.join(dirpath, dirname)
                    name = self._clean_zip_name(dirname, dir)
                    zipdir = zipfile.ZipInfo(self.name + '/' + name + '/')
                    # Store explicit directory entries with unix mode 0755
                    # (zip external attributes keep the mode in the high bits).
                    zipdir.external_attr = 0755 << 16L
                    zip.writestr(zipdir, '')
                for filename in filenames:
                    if filename == 'pip-delete-this-directory.txt':
                        continue
                    filename = os.path.join(dirpath, filename)
                    name = self._clean_zip_name(filename, dir)
                    zip.write(filename, self.name + '/' + name)
            zip.close()
            logger.indent -= 2
            logger.notify('Saved %s' % display_path(archive_path))
|
|
2115 |
|
|
2116 |
def _clean_zip_name(self, name, prefix):
|
|
2117 |
assert name.startswith(prefix+'/'), (
|
|
2118 |
"name %r doesn't start with prefix %r" % (name, prefix))
|
|
2119 |
name = name[len(prefix)+1:]
|
|
2120 |
name = name.replace(os.path.sep, '/')
|
|
2121 |
return name
|
|
2122 |
|
|
2123 |
def install(self, install_options):
|
|
2124 |
if self.editable:
|
|
2125 |
self.install_editable()
|
|
2126 |
return
|
|
2127 |
temp_location = tempfile.mkdtemp('-record', 'pip-')
|
|
2128 |
record_filename = os.path.join(temp_location, 'install-record.txt')
|
|
2129 |
## FIXME: I'm not sure if this is a reasonable location; probably not
|
|
2130 |
## but we can't put it in the default location, as that is a virtualenv symlink that isn't writable
|
|
2131 |
header_dir = os.path.join(os.path.dirname(os.path.dirname(self.source_dir)), 'lib', 'include')
|
|
2132 |
logger.notify('Running setup.py install for %s' % self.name)
|
|
2133 |
logger.indent += 2
|
|
2134 |
try:
|
|
2135 |
call_subprocess(
|
|
2136 |
[sys.executable, '-c',
|
|
2137 |
"import setuptools; __file__=%r; execfile(%r)" % (self.setup_py, self.setup_py),
|
|
2138 |
'install', '--single-version-externally-managed', '--record', record_filename,
|
|
2139 |
'--install-headers', header_dir] + install_options,
|
|
2140 |
cwd=self.source_dir, filter_stdout=self._filter_install, show_stdout=False)
|
|
2141 |
finally:
|
|
2142 |
logger.indent -= 2
|
|
2143 |
self.install_succeeded = True
|
|
2144 |
f = open(record_filename)
|
|
2145 |
for line in f:
|
|
2146 |
line = line.strip()
|
|
2147 |
if line.endswith('.egg-info'):
|
|
2148 |
egg_info_dir = line
|
|
2149 |
break
|
|
2150 |
else:
|
|
2151 |
logger.warn('Could not find .egg-info directory in install record for %s' % self)
|
|
2152 |
## FIXME: put the record somewhere
|
|
2153 |
return
|
|
2154 |
f.close()
|
|
2155 |
new_lines = []
|
|
2156 |
f = open(record_filename)
|
|
2157 |
for line in f:
|
|
2158 |
filename = line.strip()
|
|
2159 |
if os.path.isdir(filename):
|
|
2160 |
filename += os.path.sep
|
|
2161 |
new_lines.append(make_path_relative(filename, egg_info_dir))
|
|
2162 |
f.close()
|
|
2163 |
f = open(os.path.join(egg_info_dir, 'installed-files.txt'), 'w')
|
|
2164 |
f.write('\n'.join(new_lines)+'\n')
|
|
2165 |
f.close()
|
|
2166 |
|
|
2167 |
def remove_temporary_source(self):
|
|
2168 |
"""Remove the source files from this requirement, if they are marked
|
|
2169 |
for deletion"""
|
|
2170 |
if self.is_bundle or os.path.exists(self.delete_marker_filename):
|
|
2171 |
logger.info('Removing source in %s' % self.source_dir)
|
|
2172 |
if self.source_dir:
|
|
2173 |
shutil.rmtree(self.source_dir, ignore_errors=True, onerror=rmtree_errorhandler)
|
|
2174 |
self.source_dir = None
|
|
2175 |
if self._temp_build_dir and os.path.exists(self._temp_build_dir):
|
|
2176 |
shutil.rmtree(self._temp_build_dir, ignore_errors=True, onerror=rmtree_errorhandler)
|
|
2177 |
self._temp_build_dir = None
|
|
2178 |
|
|
2179 |
    def install_editable(self):
        """Run ``setup.py develop`` to install this requirement in place
        (dependencies are resolved by pip itself, hence --no-deps)."""
        logger.notify('Running setup.py develop for %s' % self.name)
        logger.indent += 2
        try:
            ## FIXME: should we do --install-headers here too?
            call_subprocess(
                [sys.executable, '-c',
                 "import setuptools; __file__=%r; execfile(%r)" % (self.setup_py, self.setup_py),
                 'develop', '--no-deps'], cwd=self.source_dir, filter_stdout=self._filter_install,
                show_stdout=False)
        finally:
            logger.indent -= 2
        self.install_succeeded = True
|
|
2192 |
|
|
2193 |
def _filter_install(self, line):
|
|
2194 |
level = Logger.NOTIFY
|
|
2195 |
for regex in [r'^running .*', r'^writing .*', '^creating .*', '^[Cc]opying .*',
|
|
2196 |
r'^reading .*', r"^removing .*\.egg-info' \(and everything under it\)$",
|
|
2197 |
r'^byte-compiling ',
|
|
2198 |
# Not sure what this warning is, but it seems harmless:
|
|
2199 |
r"^warning: manifest_maker: standard file '-c' not found$"]:
|
|
2200 |
if re.search(regex, line.strip()):
|
|
2201 |
level = Logger.INFO
|
|
2202 |
break
|
|
2203 |
return (level, line)
|
|
2204 |
|
|
2205 |
    def check_if_exists(self):
        """Find an installed distribution that satisfies or conflicts
        with this requirement, and set self.satisfied_by or
        self.conflicts_with appropriately."""
        if self.req is None:
            return False
        try:
            self.satisfied_by = pkg_resources.get_distribution(self.req)
        except pkg_resources.DistributionNotFound:
            return False
        except pkg_resources.VersionConflict:
            # Some version is installed but it doesn't satisfy self.req;
            # record it so the caller can uninstall/upgrade it.
            self.conflicts_with = pkg_resources.get_distribution(self.req.project_name)
        # A conflicting installation still counts as "exists".
        return True
|
|
2218 |
|
|
2219 |
    @property
    def is_bundle(self):
        """True when the temp build dir contains a pip/pyinstall bundle
        manifest; the answer is cached after the first real check."""
        if self._is_bundle is not None:
            return self._is_bundle
        base = self._temp_build_dir
        if not base:
            ## FIXME: this doesn't seem right:
            return False
        self._is_bundle = (os.path.exists(os.path.join(base, 'pip-manifest.txt'))
                           or os.path.exists(os.path.join(base, 'pyinstall-manifest.txt')))
        return self._is_bundle
|
|
2230 |
|
|
2231 |
    def bundle_requirements(self):
        """Yield an InstallRequirement for each package shipped inside this
        bundle: editable checkouts first, then plain build directories.

        Must be called after move_bundle_files() has populated
        _bundle_editable_dirs and _bundle_build_dirs.
        """
        for dest_dir in self._bundle_editable_dirs:
            package = os.path.basename(dest_dir)
            ## FIXME: svnism:
            for vcs_backend in vcs.backends:
                url = rev = None
                vcs_bundle_file = os.path.join(
                    dest_dir, vcs_backend.bundle_file)
                if os.path.exists(vcs_bundle_file):
                    vc_type = vcs_backend.name
                    fp = open(vcs_bundle_file)
                    content = fp.read()
                    fp.close()
                    url, rev = vcs_backend().parse_vcs_bundle_file(content)
                    break
            if url:
                # Rebuild the editable URL form, e.g. 'svn+http://...@rev'.
                url = '%s+%s@%s' % (vc_type, url, rev)
            else:
                url = None
            yield InstallRequirement(
                package, self, editable=True, url=url,
                update=False, source_dir=dest_dir)
        for dest_dir in self._bundle_build_dirs:
            package = os.path.basename(dest_dir)
            yield InstallRequirement(
                package, self,
                source_dir=dest_dir)
|
|
2258 |
|
|
2259 |
    def move_bundle_files(self, dest_build_dir, dest_src_dir):
        """Move a bundle's src/ (editable) and build/ trees out of the temp
        build dir into the shared source/build directories, recording each
        contained package's destination for bundle_requirements()."""
        base = self._temp_build_dir
        assert base
        src_dir = os.path.join(base, 'src')
        build_dir = os.path.join(base, 'build')
        bundle_build_dirs = []
        bundle_editable_dirs = []
        for source_dir, dest_dir, dir_collection in [
            (src_dir, dest_src_dir, bundle_editable_dirs),
            (build_dir, dest_build_dir, bundle_build_dirs)]:
            if os.path.exists(source_dir):
                for dirname in os.listdir(source_dir):
                    dest = os.path.join(dest_dir, dirname)
                    # Record the destination even if we cannot move into it.
                    dir_collection.append(dest)
                    if os.path.exists(dest):
                        logger.warn('The directory %s (containing package %s) already exists; cannot move source from bundle %s'
                                    % (dest, dirname, self))
                        continue
                    if not os.path.exists(dest_dir):
                        logger.info('Creating directory %s' % dest_dir)
                        os.makedirs(dest_dir)
                    shutil.move(os.path.join(source_dir, dirname), dest)
                if not os.listdir(source_dir):
                    # Clean up the now-empty src/ or build/ directory.
                    os.rmdir(source_dir)
        self._temp_build_dir = None
        self._bundle_build_dirs = bundle_build_dirs
        self._bundle_editable_dirs = bundle_editable_dirs
|
|
2286 |
|
|
2287 |
    @property
    def delete_marker_filename(self):
        """Path of the marker file that flags this source tree as safe to
        delete after a successful install."""
        assert self.source_dir
        return os.path.join(self.source_dir, 'pip-delete-this-directory.txt')
|
|
2291 |
|
|
2292 |
DELETE_MARKER_MESSAGE = '''\
|
|
2293 |
This file is placed here by pip to indicate the source was put
|
|
2294 |
here by pip.
|
|
2295 |
|
|
2296 |
Once this package is successfully installed this source code will be
|
|
2297 |
deleted (unless you remove this file).
|
|
2298 |
'''
|
|
2299 |
|
|
2300 |
class RequirementSet(object):
|
|
2301 |
|
|
2302 |
    def __init__(self, build_dir, src_dir, download_dir, download_cache=None,
                 upgrade=False, ignore_installed=False,
                 ignore_dependencies=False):
        """Track a set of requirements to be downloaded/installed together.

        build_dir -- where sdists are unpacked and built
        src_dir -- where editable checkouts are placed
        download_dir -- when set, packages are downloaded there instead of
                        being installed
        download_cache -- optional directory caching downloaded archives
        upgrade -- reinstall requirements that are already satisfied
        ignore_installed -- skip the installed-distribution check
        ignore_dependencies -- do not recurse into dependencies
        """
        self.build_dir = build_dir
        self.src_dir = src_dir
        self.download_dir = download_dir
        self.download_cache = download_cache
        self.upgrade = upgrade
        self.ignore_installed = ignore_installed
        self.requirements = {}
        # Mapping of alias: real_name
        self.requirement_aliases = {}
        # Requirements whose project name is not yet known (URL-only).
        self.unnamed_requirements = []
        self.ignore_dependencies = ignore_dependencies
        self.successfully_downloaded = []
        self.successfully_installed = []
|
|
2318 |
|
|
2319 |
def __str__(self):
|
|
2320 |
reqs = [req for req in self.requirements.values()
|
|
2321 |
if not req.comes_from]
|
|
2322 |
reqs.sort(key=lambda req: req.name.lower())
|
|
2323 |
return ' '.join([str(req.req) for req in reqs])
|
|
2324 |
|
|
2325 |
def add_requirement(self, install_req):
|
|
2326 |
name = install_req.name
|
|
2327 |
if not name:
|
|
2328 |
self.unnamed_requirements.append(install_req)
|
|
2329 |
else:
|
|
2330 |
if self.has_requirement(name):
|
|
2331 |
raise InstallationError(
|
|
2332 |
'Double requirement given: %s (aready in %s, name=%r)'
|
|
2333 |
% (install_req, self.get_requirement(name), name))
|
|
2334 |
self.requirements[name] = install_req
|
|
2335 |
## FIXME: what about other normalizations? E.g., _ vs. -?
|
|
2336 |
if name.lower() != name:
|
|
2337 |
self.requirement_aliases[name.lower()] = name
|
|
2338 |
|
|
2339 |
def has_requirement(self, project_name):
|
|
2340 |
for name in project_name, project_name.lower():
|
|
2341 |
if name in self.requirements or name in self.requirement_aliases:
|
|
2342 |
return True
|
|
2343 |
return False
|
|
2344 |
|
|
2345 |
@property
|
|
2346 |
def is_download(self):
|
|
2347 |
if self.download_dir:
|
|
2348 |
self.download_dir = os.path.expanduser(self.download_dir)
|
|
2349 |
if os.path.exists(self.download_dir):
|
|
2350 |
return True
|
|
2351 |
else:
|
|
2352 |
logger.fatal('Could not find download directory')
|
|
2353 |
raise InstallationError(
|
|
2354 |
"Could not find or access download directory '%s'"
|
|
2355 |
% display_path(self.download_dir))
|
|
2356 |
return False
|
|
2357 |
|
|
2358 |
def get_requirement(self, project_name):
|
|
2359 |
for name in project_name, project_name.lower():
|
|
2360 |
if name in self.requirements:
|
|
2361 |
return self.requirements[name]
|
|
2362 |
if name in self.requirement_aliases:
|
|
2363 |
return self.requirements[self.requirement_aliases[name]]
|
|
2364 |
raise KeyError("No project with the name %r" % project_name)
|
|
2365 |
|
|
2366 |
def uninstall(self, auto_confirm=False):
|
|
2367 |
for req in self.requirements.values():
|
|
2368 |
req.uninstall(auto_confirm=auto_confirm)
|
|
2369 |
|
|
2370 |
    def install_files(self, finder, force_root_egg_info=False):
        """Download/unpack every requirement, plus discovered dependencies.

        Walks a worklist of unnamed then named requirements; for each one
        decides whether it must be fetched at all, obtains it (editable
        checkout, bundle, or archive download), runs egg-info, and queues
        any newly discovered dependency requirements.

        :param finder: object with ``find_requirement`` /
            ``add_dependency_links`` used to locate download URLs.
        :param force_root_egg_info: also run egg-info at the source root so
            a bundle can be packed afterwards.
        """
        unnamed = list(self.unnamed_requirements)
        reqs = self.requirements.values()
        while reqs or unnamed:
            # Unnamed requirements first: their names become known only
            # after their metadata has been read.
            if unnamed:
                req_to_install = unnamed.pop(0)
            else:
                req_to_install = reqs.pop(0)
            install = True
            if not self.ignore_installed and not req_to_install.editable:
                req_to_install.check_if_exists()
                if req_to_install.satisfied_by:
                    if self.upgrade:
                        # Remember the installed dist so install() can
                        # uninstall it first and roll back on failure.
                        req_to_install.conflicts_with = req_to_install.satisfied_by
                        req_to_install.satisfied_by = None
                    else:
                        install = False
                if req_to_install.satisfied_by:
                    logger.notify('Requirement already satisfied '
                                  '(use --upgrade to upgrade): %s'
                                  % req_to_install)
            if req_to_install.editable:
                logger.notify('Obtaining %s' % req_to_install)
            elif install:
                if req_to_install.url and req_to_install.url.lower().startswith('file:'):
                    logger.notify('Unpacking %s' % display_path(url_to_filename(req_to_install.url)))
                else:
                    logger.notify('Downloading/unpacking %s' % req_to_install)
            logger.indent += 2
            is_bundle = False
            try:
                if req_to_install.editable:
                    # Editable requirement: check out / update under src_dir.
                    if req_to_install.source_dir is None:
                        location = req_to_install.build_location(self.src_dir)
                        req_to_install.source_dir = location
                    else:
                        location = req_to_install.source_dir
                    if not os.path.exists(self.build_dir):
                        os.makedirs(self.build_dir)
                    req_to_install.update_editable(not self.is_download)
                    if self.is_download:
                        req_to_install.run_egg_info()
                        req_to_install.archive(self.download_dir)
                    else:
                        req_to_install.run_egg_info()
                elif install:
                    location = req_to_install.build_location(self.build_dir, not self.is_download)
                    ## FIXME: is the existance of the checkout good enough to use it? I don't think so.
                    unpack = True
                    if not os.path.exists(os.path.join(location, 'setup.py')):
                        ## FIXME: this won't upgrade when there's an existing package unpacked in `location`
                        if req_to_install.url is None:
                            url = finder.find_requirement(req_to_install, upgrade=self.upgrade)
                        else:
                            ## FIXME: should req_to_install.url already be a link?
                            url = Link(req_to_install.url)
                            assert url
                        if url:
                            try:
                                self.unpack_url(url, location, self.is_download)
                            except urllib2.HTTPError, e:
                                logger.fatal('Could not install requirement %s because of error %s'
                                             % (req_to_install, e))
                                raise InstallationError(
                                    'Could not install requirement %s because of HTTP error %s for URL %s'
                                    % (req_to_install, e, url))
                        else:
                            unpack = False
                    if unpack:
                        is_bundle = req_to_install.is_bundle
                        url = None
                        if is_bundle:
                            # A .pybundle: split its contents into build/
                            # and src/ and queue the contained requirements.
                            req_to_install.move_bundle_files(self.build_dir, self.src_dir)
                            for subreq in req_to_install.bundle_requirements():
                                reqs.append(subreq)
                                self.add_requirement(subreq)
                        elif self.is_download:
                            req_to_install.source_dir = location
                            if url and url.scheme in vcs.all_schemes:
                                req_to_install.run_egg_info()
                            req_to_install.archive(self.download_dir)
                        else:
                            req_to_install.source_dir = location
                            req_to_install.run_egg_info()
                            if force_root_egg_info:
                                # We need to run this to make sure that the .egg-info/
                                # directory is created for packing in the bundle
                                req_to_install.run_egg_info(force_root_egg_info=True)
                            req_to_install.assert_source_matches_version()
                            # Mark the unpacked tree as safe to delete later.
                            f = open(req_to_install.delete_marker_filename, 'w')
                            f.write(DELETE_MARKER_MESSAGE)
                            f.close()
                if not is_bundle and not self.is_download:
                    ## FIXME: shouldn't be globally added:
                    finder.add_dependency_links(req_to_install.dependency_links)
                    ## FIXME: add extras in here:
                    if not self.ignore_dependencies:
                        # Queue each dependency that is not already tracked.
                        for req in req_to_install.requirements():
                            try:
                                name = pkg_resources.Requirement.parse(req).project_name
                            except ValueError, e:
                                ## FIXME: proper warning
                                logger.error('Invalid requirement: %r (%s) in requirement %s' % (req, e, req_to_install))
                                continue
                            if self.has_requirement(name):
                                ## FIXME: check for conflict
                                continue
                            subreq = InstallRequirement(req, req_to_install)
                            reqs.append(subreq)
                            self.add_requirement(subreq)
                    if req_to_install.name not in self.requirements:
                        self.requirements[req_to_install.name] = req_to_install
                else:
                    req_to_install.remove_temporary_source()
                if install:
                    self.successfully_downloaded.append(req_to_install)
            finally:
                logger.indent -= 2
|
|
2488 |
|
|
2489 |
    def unpack_url(self, link, location, only_download=False):
        """Fetch ``link`` and unpack (or just save) it into ``location``.

        VCS links are checked out (or exported, for download-only runs);
        ``file:`` links are unpacked directly; other URLs are downloaded
        (optionally through the download cache), MD5-verified when the link
        carries an ``#md5=`` fragment, then unpacked -- or merely copied
        when ``only_download`` is true.
        """
        if only_download:
            location = self.download_dir
        # VCS URLs are handled by the matching backend and never cached.
        for backend in vcs.backends:
            if link.scheme in backend.schemes:
                vcs_backend = backend(link.url)
                if only_download:
                    vcs_backend.export(location)
                else:
                    vcs_backend.unpack(location)
                return
        dir = tempfile.mkdtemp()
        if link.url.lower().startswith('file:'):
            # Local file: no download, no caching.
            source = url_to_filename(link.url)
            content_type = mimetypes.guess_type(source)[0]
            self.unpack_file(source, location, content_type, link)
            return
        md5_hash = link.md5_hash
        target_url = link.url.split('#', 1)[0]
        target_file = None
        if self.download_cache:
            if not os.path.isdir(self.download_cache):
                logger.indent -= 2
                logger.notify('Creating supposed download cache at %s' % self.download_cache)
                logger.indent += 2
                os.makedirs(self.download_cache)
            # Cache key is the quoted URL (quote with no safe characters).
            target_file = os.path.join(self.download_cache,
                                       urllib.quote(target_url, ''))
        if (target_file and os.path.exists(target_file)
            and os.path.exists(target_file+'.content-type')):
            # Cache hit: reuse the cached file and its stored content type.
            fp = open(target_file+'.content-type')
            content_type = fp.read().strip()
            fp.close()
            if md5_hash:
                # Re-hash the cached file so the MD5 check below still runs.
                download_hash = md5()
                fp = open(target_file, 'rb')
                while 1:
                    chunk = fp.read(4096)
                    if not chunk:
                        break
                    download_hash.update(chunk)
                fp.close()
            temp_location = target_file
            logger.notify('Using download cache from %s' % target_file)
        else:
            try:
                resp = urllib2.urlopen(target_url)
            except urllib2.HTTPError, e:
                logger.fatal("HTTP error %s while getting %s" % (e.code, link))
                raise
            except IOError, e:
                # Typically an FTP error
                logger.fatal("Error %s while getting %s" % (e, link))
                raise
            content_type = resp.info()['content-type']
            filename = link.filename
            # If the URL gave no extension, derive one from the content type
            # or from the (possibly redirected) final URL.
            ext = splitext(filename)[1]
            if not ext:
                ext = mimetypes.guess_extension(content_type)
                if ext:
                    filename += ext
            if not ext and link.url != resp.geturl():
                ext = os.path.splitext(resp.geturl())[1]
                if ext:
                    filename += ext
            temp_location = os.path.join(dir, filename)
            fp = open(temp_location, 'wb')
            if md5_hash:
                download_hash = md5()
            try:
                total_length = int(resp.info()['content-length'])
            except (ValueError, KeyError):
                total_length = 0
            downloaded = 0
            # Only show a progress meter for large or unknown-size bodies.
            show_progress = total_length > 40*1000 or not total_length
            show_url = link.show_url
            try:
                if show_progress:
                    ## FIXME: the URL can get really long in this message:
                    if total_length:
                        logger.start_progress('Downloading %s (%s): ' % (show_url, format_size(total_length)))
                    else:
                        logger.start_progress('Downloading %s (unknown size): ' % show_url)
                else:
                    logger.notify('Downloading %s' % show_url)
                logger.debug('Downloading from URL %s' % link)
                while 1:
                    chunk = resp.read(4096)
                    if not chunk:
                        break
                    downloaded += len(chunk)
                    if show_progress:
                        if not total_length:
                            logger.show_progress('%s' % format_size(downloaded))
                        else:
                            logger.show_progress('%3i%% %s' % (100*downloaded/total_length, format_size(downloaded)))
                    if md5_hash:
                        download_hash.update(chunk)
                    fp.write(chunk)
                fp.close()
            finally:
                if show_progress:
                    logger.end_progress('%s downloaded' % format_size(downloaded))
        if md5_hash:
            download_hash = download_hash.hexdigest()
            if download_hash != md5_hash:
                logger.fatal("MD5 hash of the package %s (%s) doesn't match the expected hash %s!"
                             % (link, download_hash, md5_hash))
                raise InstallationError('Bad MD5 hash for package %s' % link)
        if only_download:
            self.copy_file(temp_location, location, content_type, link)
        else:
            self.unpack_file(temp_location, location, content_type, link)
        if target_file and target_file != temp_location:
            # Freshly downloaded with a cache configured: store it.
            logger.notify('Storing download in cache at %s' % display_path(target_file))
            shutil.copyfile(temp_location, target_file)
            fp = open(target_file+'.content-type', 'w')
            fp.write(content_type)
            fp.close()
            os.unlink(temp_location)
        if target_file is None:
            os.unlink(temp_location)
|
|
2611 |
|
|
2612 |
def copy_file(self, filename, location, content_type, link):
|
|
2613 |
copy = True
|
|
2614 |
download_location = os.path.join(location, link.filename)
|
|
2615 |
if os.path.exists(download_location):
|
|
2616 |
response = ask('The file %s exists. (i)gnore, (w)ipe, (b)ackup '
|
|
2617 |
% display_path(download_location), ('i', 'w', 'b'))
|
|
2618 |
if response == 'i':
|
|
2619 |
copy = False
|
|
2620 |
elif response == 'w':
|
|
2621 |
logger.warn('Deleting %s' % display_path(download_location))
|
|
2622 |
os.remove(download_location)
|
|
2623 |
elif response == 'b':
|
|
2624 |
dest_file = backup_dir(download_location)
|
|
2625 |
logger.warn('Backing up %s to %s'
|
|
2626 |
% (display_path(download_location), display_path(dest_file)))
|
|
2627 |
shutil.move(download_location, dest_file)
|
|
2628 |
if copy:
|
|
2629 |
shutil.copy(filename, download_location)
|
|
2630 |
logger.indent -= 2
|
|
2631 |
logger.notify('Saved %s' % display_path(download_location))
|
|
2632 |
|
|
2633 |
def unpack_file(self, filename, location, content_type, link):
|
|
2634 |
if (content_type == 'application/zip'
|
|
2635 |
or filename.endswith('.zip')
|
|
2636 |
or filename.endswith('.pybundle')
|
|
2637 |
or zipfile.is_zipfile(filename)):
|
|
2638 |
self.unzip_file(filename, location, flatten=not filename.endswith('.pybundle'))
|
|
2639 |
elif (content_type == 'application/x-gzip'
|
|
2640 |
or tarfile.is_tarfile(filename)
|
|
2641 |
or splitext(filename)[1].lower() in ('.tar', '.tar.gz', '.tar.bz2', '.tgz', '.tbz')):
|
|
2642 |
self.untar_file(filename, location)
|
|
2643 |
elif (content_type and content_type.startswith('text/html')
|
|
2644 |
and is_svn_page(file_contents(filename))):
|
|
2645 |
# We don't really care about this
|
|
2646 |
Subversion('svn+' + link.url).unpack(location)
|
|
2647 |
else:
|
|
2648 |
## FIXME: handle?
|
|
2649 |
## FIXME: magic signatures?
|
|
2650 |
logger.fatal('Cannot unpack file %s (downloaded from %s, content-type: %s); cannot detect archive format'
|
|
2651 |
% (filename, location, content_type))
|
|
2652 |
raise InstallationError('Cannot determine archive format of %s' % location)
|
|
2653 |
|
|
2654 |
def unzip_file(self, filename, location, flatten=True):
|
|
2655 |
"""Unzip the file (zip file located at filename) to the destination
|
|
2656 |
location"""
|
|
2657 |
if not os.path.exists(location):
|
|
2658 |
os.makedirs(location)
|
|
2659 |
zipfp = open(filename, 'rb')
|
|
2660 |
try:
|
|
2661 |
zip = zipfile.ZipFile(zipfp)
|
|
2662 |
leading = has_leading_dir(zip.namelist()) and flatten
|
|
2663 |
for name in zip.namelist():
|
|
2664 |
data = zip.read(name)
|
|
2665 |
fn = name
|
|
2666 |
if leading:
|
|
2667 |
fn = split_leading_dir(name)[1]
|
|
2668 |
fn = os.path.join(location, fn)
|
|
2669 |
dir = os.path.dirname(fn)
|
|
2670 |
if not os.path.exists(dir):
|
|
2671 |
os.makedirs(dir)
|
|
2672 |
if fn.endswith('/') or fn.endswith('\\'):
|
|
2673 |
# A directory
|
|
2674 |
if not os.path.exists(fn):
|
|
2675 |
os.makedirs(fn)
|
|
2676 |
else:
|
|
2677 |
fp = open(fn, 'wb')
|
|
2678 |
try:
|
|
2679 |
fp.write(data)
|
|
2680 |
finally:
|
|
2681 |
fp.close()
|
|
2682 |
finally:
|
|
2683 |
zipfp.close()
|
|
2684 |
|
|
2685 |
    def untar_file(self, filename, location):
        """Untar the file (tar file located at filename) to the destination location"""
        # NOTE(review): member names are joined into ``location`` without
        # sanitization, so a malicious archive with '..' components could
        # escape the destination directory -- verify before reuse.
        if not os.path.exists(location):
            os.makedirs(location)
        # Pick the tarfile mode from the extension; fall back to
        # transparent compression detection ('r:*') with a warning.
        if filename.lower().endswith('.gz') or filename.lower().endswith('.tgz'):
            mode = 'r:gz'
        elif filename.lower().endswith('.bz2') or filename.lower().endswith('.tbz'):
            mode = 'r:bz2'
        elif filename.lower().endswith('.tar'):
            mode = 'r'
        else:
            logger.warn('Cannot determine compression type for file %s' % filename)
            mode = 'r:*'
        tar = tarfile.open(filename, mode)
        try:
            # Strip a shared top-level directory, if all members have one.
            leading = has_leading_dir([member.name for member in tar.getmembers()])
            for member in tar.getmembers():
                fn = member.name
                if leading:
                    fn = split_leading_dir(fn)[1]
                path = os.path.join(location, fn)
                if member.isdir():
                    if not os.path.exists(path):
                        os.makedirs(path)
                else:
                    try:
                        fp = tar.extractfile(member)
                    except (KeyError, AttributeError), e:
                        # Some corrupt tar files seem to produce this
                        # (specifically bad symlinks)
                        logger.warn(
                            'In the tar file %s the member %s is invalid: %s'
                            % (filename, member.name, e))
                        continue
                    if not os.path.exists(os.path.dirname(path)):
                        os.makedirs(os.path.dirname(path))
                    destfp = open(path, 'wb')
                    try:
                        shutil.copyfileobj(fp, destfp)
                    finally:
                        destfp.close()
                    fp.close()
        finally:
            tar.close()
|
|
2729 |
|
|
2730 |
    def install(self, install_options):
        """Install everything in this set (after having downloaded and unpacked the packages)"""
        # Install only requirements not already satisfied (all of them when
        # upgrading), in case-insensitive name order.
        to_install = sorted([r for r in self.requirements.values()
                             if self.upgrade or not r.satisfied_by],
                            key=lambda p: p.name.lower())
        if to_install:
            logger.notify('Installing collected packages: %s' % (', '.join([req.name for req in to_install])))
        logger.indent += 2
        try:
            for requirement in to_install:
                if requirement.conflicts_with:
                    # An existing installation is in the way: uninstall it
                    # first, keeping enough state for a rollback.
                    logger.notify('Found existing installation: %s'
                                  % requirement.conflicts_with)
                    logger.indent += 2
                    try:
                        requirement.uninstall(auto_confirm=True)
                    finally:
                        logger.indent -= 2
                try:
                    requirement.install(install_options)
                except:
                    # if install did not succeed, rollback previous uninstall
                    if requirement.conflicts_with and not requirement.install_succeeded:
                        requirement.rollback_uninstall()
                    raise
                requirement.remove_temporary_source()
        finally:
            logger.indent -= 2
        self.successfully_installed = to_install
|
|
2759 |
|
|
2760 |
    def create_bundle(self, bundle_filename):
        """Write a .pybundle zip containing build/ and src/ plus a manifest.

        VCS control directories are not packed; instead a small guide file
        recording the repository URL and revision is written in their place.
        Afterwards the build and source directories are always removed.
        """
        ## FIXME: can't decide which is better; zip is easier to read
        ## random files from, but tar.bz2 is smaller and not as lame a
        ## format.

        ## FIXME: this file should really include a manifest of the
        ## packages, maybe some other metadata files. It would make
        ## it easier to detect as well.
        zip = zipfile.ZipFile(bundle_filename, 'w', zipfile.ZIP_DEFLATED)
        vcs_dirs = []
        for dir, basename in (self.build_dir, 'build'), (self.src_dir, 'src'):
            dir = os.path.normcase(os.path.abspath(dir))
            for dirpath, dirnames, filenames in os.walk(dir):
                # NOTE(review): vcs_url/vcs_bundle_file/vcs_guide are only
                # bound inside this backend loop; assumes vcs.backends is
                # non-empty -- verify.
                for backend in vcs.backends:
                    vcs_backend = backend()
                    vcs_url = vcs_rev = None
                    if vcs_backend.dirname in dirnames:
                        for vcs_dir in vcs_dirs:
                            if dirpath.startswith(vcs_dir):
                                # vcs bundle file already in parent directory
                                break
                        else:
                            vcs_url, vcs_rev = vcs_backend.get_info(
                                os.path.join(dir, dirpath))
                            vcs_dirs.append(dirpath)
                        vcs_bundle_file = vcs_backend.bundle_file
                        vcs_guide = vcs_backend.guide % {'url': vcs_url,
                                                         'rev': vcs_rev}
                        # Do not descend into the VCS control directory.
                        dirnames.remove(vcs_backend.dirname)
                        break
                if 'pip-egg-info' in dirnames:
                    dirnames.remove('pip-egg-info')
                for dirname in dirnames:
                    dirname = os.path.join(dirpath, dirname)
                    name = self._clean_zip_name(dirname, dir)
                    # Empty entry marks the directory inside the zip.
                    zip.writestr(basename + '/' + name + '/', '')
                for filename in filenames:
                    if filename == 'pip-delete-this-directory.txt':
                        continue
                    filename = os.path.join(dirpath, filename)
                    name = self._clean_zip_name(filename, dir)
                    zip.write(filename, basename + '/' + name)
                if vcs_url:
                    # Replace the stripped VCS dir with a guide file.
                    name = os.path.join(dirpath, vcs_bundle_file)
                    name = self._clean_zip_name(name, dir)
                    zip.writestr(basename + '/' + name, vcs_guide)

        zip.writestr('pip-manifest.txt', self.bundle_requirements())
        zip.close()
        # Unlike installation, this will always delete the build directories
        logger.info('Removing temporary build dir %s and source dir %s'
                    % (self.build_dir, self.src_dir))
        for dir in self.build_dir, self.src_dir:
            if os.path.exists(dir):
                shutil.rmtree(dir)
|
|
2815 |
|
|
2816 |
|
|
2817 |
BUNDLE_HEADER = '''\
|
|
2818 |
# This is a pip bundle file, that contains many source packages
|
|
2819 |
# that can be installed as a group. You can install this like:
|
|
2820 |
# pip this_file.zip
|
|
2821 |
# The rest of the file contains a list of all the packages included:
|
|
2822 |
'''
|
|
2823 |
|
|
2824 |
def bundle_requirements(self):
|
|
2825 |
parts = [self.BUNDLE_HEADER]
|
|
2826 |
for req in sorted(
|
|
2827 |
[req for req in self.requirements.values()
|
|
2828 |
if not req.comes_from],
|
|
2829 |
key=lambda x: x.name):
|
|
2830 |
parts.append('%s==%s\n' % (req.name, req.installed_version))
|
|
2831 |
parts.append('# These packages were installed to satisfy the above requirements:\n')
|
|
2832 |
for req in sorted(
|
|
2833 |
[req for req in self.requirements.values()
|
|
2834 |
if req.comes_from],
|
|
2835 |
key=lambda x: x.name):
|
|
2836 |
parts.append('%s==%s\n' % (req.name, req.installed_version))
|
|
2837 |
## FIXME: should we do something with self.unnamed_requirements?
|
|
2838 |
return ''.join(parts)
|
|
2839 |
|
|
2840 |
def _clean_zip_name(self, name, prefix):
|
|
2841 |
assert name.startswith(prefix+'/'), (
|
|
2842 |
"name %r doesn't start with prefix %r" % (name, prefix))
|
|
2843 |
name = name[len(prefix)+1:]
|
|
2844 |
name = name.replace(os.path.sep, '/')
|
|
2845 |
return name
|
|
2846 |
|
|
2847 |
class HTMLPage(object):
|
|
2848 |
"""Represents one page, along with its URL"""
|
|
2849 |
|
|
2850 |
## FIXME: these regexes are horrible hacks:
|
|
2851 |
_homepage_re = re.compile(r'<th>\s*home\s*page', re.I)
|
|
2852 |
_download_re = re.compile(r'<th>\s*download\s+url', re.I)
|
|
2853 |
## These aren't so aweful:
|
|
2854 |
_rel_re = re.compile("""<[^>]*\srel\s*=\s*['"]?([^'">]+)[^>]*>""", re.I)
|
|
2855 |
_href_re = re.compile('href=(?:"([^"]*)"|\'([^\']*)\'|([^>\\s\\n]*))', re.I|re.S)
|
|
2856 |
_base_re = re.compile(r"""<base\s+href\s*=\s*['"]?([^'">]+)""", re.I)
|
|
2857 |
|
|
2858 |
def __init__(self, content, url, headers=None):
|
|
2859 |
self.content = content
|
|
2860 |
self.url = url
|
|
2861 |
self.headers = headers
|
|
2862 |
|
|
2863 |
def __str__(self):
|
|
2864 |
return self.url
|
|
2865 |
|
|
2866 |
@classmethod
|
|
2867 |
def get_page(cls, link, req, cache=None, skip_archives=True):
|
|
2868 |
url = link.url
|
|
2869 |
url = url.split('#', 1)[0]
|
|
2870 |
if cache.too_many_failures(url):
|
|
2871 |
return None
|
|
2872 |
if url.lower().startswith('svn'):
|
|
2873 |
logger.debug('Cannot look at svn URL %s' % link)
|
|
2874 |
return None
|
|
2875 |
if cache is not None:
|
|
2876 |
inst = cache.get_page(url)
|
|
2877 |
if inst is not None:
|
|
2878 |
return inst
|
|
2879 |
try:
|
|
2880 |
if skip_archives:
|
|
2881 |
if cache is not None:
|
|
2882 |
if cache.is_archive(url):
|
|
2883 |
return None
|
|
2884 |
filename = link.filename
|
|
2885 |
for bad_ext in ['.tar', '.tar.gz', '.tar.bz2', '.tgz', '.zip']:
|
|
2886 |
if filename.endswith(bad_ext):
|
|
2887 |
content_type = cls._get_content_type(url)
|
|
2888 |
if content_type.lower().startswith('text/html'):
|
|
2889 |
break
|
|
2890 |
else:
|
|
2891 |
logger.debug('Skipping page %s because of Content-Type: %s' % (link, content_type))
|
|
2892 |
if cache is not None:
|
|
2893 |
cache.set_is_archive(url)
|
|
2894 |
return None
|
|
2895 |
logger.debug('Getting page %s' % url)
|
|
2896 |
resp = urllib2.urlopen(url)
|
|
2897 |
real_url = resp.geturl()
|
|
2898 |
headers = resp.info()
|
|
2899 |
inst = cls(resp.read(), real_url, headers)
|
|
2900 |
except (urllib2.HTTPError, urllib2.URLError, socket.timeout, socket.error), e:
|
|
2901 |
desc = str(e)
|
|
2902 |
if isinstance(e, socket.timeout):
|
|
2903 |
log_meth = logger.info
|
|
2904 |
level =1
|
|
2905 |
desc = 'timed out'
|
|
2906 |
elif isinstance(e, urllib2.URLError):
|
|
2907 |
log_meth = logger.info
|
|
2908 |
if hasattr(e, 'reason') and isinstance(e.reason, socket.timeout):
|
|
2909 |
desc = 'timed out'
|
|
2910 |
level = 1
|
|
2911 |
else:
|
|
2912 |
level = 2
|
|
2913 |
elif isinstance(e, urllib2.HTTPError) and e.code == 404:
|
|
2914 |
## FIXME: notify?
|
|
2915 |
log_meth = logger.info
|
|
2916 |
level = 2
|
|
2917 |
else:
|
|
2918 |
log_meth = logger.info
|
|
2919 |
level = 1
|
|
2920 |
log_meth('Could not fetch URL %s: %s' % (link, desc))
|
|
2921 |
log_meth('Will skip URL %s when looking for download links for %s' % (link.url, req))
|
|
2922 |
if cache is not None:
|
|
2923 |
cache.add_page_failure(url, level)
|
|
2924 |
return None
|
|
2925 |
if cache is not None:
|
|
2926 |
cache.add_page([url, real_url], inst)
|
|
2927 |
return inst
|
|
2928 |
|
|
2929 |
@staticmethod
|
|
2930 |
def _get_content_type(url):
|
|
2931 |
"""Get the Content-Type of the given url, using a HEAD request"""
|
|
2932 |
scheme, netloc, path, query, fragment = urlparse.urlsplit(url)
|
|
2933 |
if scheme == 'http':
|
|
2934 |
ConnClass = httplib.HTTPConnection
|
|
2935 |
elif scheme == 'https':
|
|
2936 |
ConnClass = httplib.HTTPSConnection
|
|
2937 |
else:
|
|
2938 |
## FIXME: some warning or something?
|
|
2939 |
## assertion error?
|
|
2940 |
return ''
|
|
2941 |
if query:
|
|
2942 |
path += '?' + query
|
|
2943 |
conn = ConnClass(netloc)
|
|
2944 |
try:
|
|
2945 |
conn.request('HEAD', path, headers={'Host': netloc})
|
|
2946 |
resp = conn.getresponse()
|
|
2947 |
if resp.status != 200:
|
|
2948 |
## FIXME: doesn't handle redirects
|
|
2949 |
return ''
|
|
2950 |
return resp.getheader('Content-Type') or ''
|
|
2951 |
finally:
|
|
2952 |
conn.close()
|
|
2953 |
|
|
2954 |
@property
|
|
2955 |
def base_url(self):
|
|
2956 |
if not hasattr(self, "_base_url"):
|
|
2957 |
match = self._base_re.search(self.content)
|
|
2958 |
if match:
|
|
2959 |
self._base_url = match.group(1)
|
|
2960 |
else:
|
|
2961 |
self._base_url = self.url
|
|
2962 |
return self._base_url
|
|
2963 |
|
|
2964 |
@property
|
|
2965 |
def links(self):
|
|
2966 |
"""Yields all links in the page"""
|
|
2967 |
for match in self._href_re.finditer(self.content):
|
|
2968 |
url = match.group(1) or match.group(2) or match.group(3)
|
|
2969 |
url = self.clean_link(urlparse.urljoin(self.base_url, url))
|
|
2970 |
yield Link(url, self)
|
|
2971 |
|
|
2972 |
def rel_links(self):
|
|
2973 |
for url in self.explicit_rel_links():
|
|
2974 |
yield url
|
|
2975 |
for url in self.scraped_rel_links():
|
|
2976 |
yield url
|
|
2977 |
|
|
2978 |
def explicit_rel_links(self, rels=('homepage', 'download')):
|
|
2979 |
"""Yields all links with the given relations"""
|
|
2980 |
for match in self._rel_re.finditer(self.content):
|
|
2981 |
found_rels = match.group(1).lower().split()
|
|
2982 |
for rel in rels:
|
|
2983 |
if rel in found_rels:
|
|
2984 |
break
|
|
2985 |
else:
|
|
2986 |
continue
|
|
2987 |
match = self._href_re.search(match.group(0))
|
|
2988 |
if not match:
|
|
2989 |
continue
|
|
2990 |
url = match.group(1) or match.group(2) or match.group(3)
|
|
2991 |
url = self.clean_link(urlparse.urljoin(self.base_url, url))
|
|
2992 |
yield Link(url, self)
|
|
2993 |
|
|
2994 |
def scraped_rel_links(self):
|
|
2995 |
for regex in (self._homepage_re, self._download_re):
|
|
2996 |
match = regex.search(self.content)
|
|
2997 |
if not match:
|
|
2998 |
continue
|
|
2999 |
href_match = self._href_re.search(self.content, pos=match.end())
|
|
3000 |
if not href_match:
|
|
3001 |
continue
|
|
3002 |
url = match.group(1) or match.group(2) or match.group(3)
|
|
3003 |
if not url:
|
|
3004 |
continue
|
|
3005 |
url = self.clean_link(urlparse.urljoin(self.base_url, url))
|
|
3006 |
yield Link(url, self)
|
|
3007 |
|
|
3008 |
_clean_re = re.compile(r'[^a-z0-9$&+,/:;=?@.#%_\\|-]', re.I)
|
|
3009 |
|
|
3010 |
def clean_link(self, url):
|
|
3011 |
"""Makes sure a link is fully encoded. That is, if a ' ' shows up in
|
|
3012 |
the link, it will be rewritten to %20 (while not over-quoting
|
|
3013 |
% or other characters)."""
|
|
3014 |
return self._clean_re.sub(
|
|
3015 |
lambda match: '%%%2x' % ord(match.group(0)), url)
|
|
3016 |
|
|
3017 |
class PageCache(object):
    """In-memory cache of fetched HTML pages, fetch failures, and URLs
    known to point at archives rather than pages."""

    # A URL is skipped once its accumulated failure points reach this.
    failure_limit = 3

    def __init__(self):
        self._failures = {}
        self._pages = {}
        self._archives = {}

    def too_many_failures(self, url):
        """True once ``url`` has accumulated ``failure_limit`` points."""
        return self._failures.get(url, 0) >= self.failure_limit

    def get_page(self, url):
        """Return the cached page for ``url``, or None."""
        return self._pages.get(url)

    def is_archive(self, url):
        """True if ``url`` was previously marked as an archive."""
        return self._archives.get(url, False)

    def set_is_archive(self, url, value=True):
        """Remember whether ``url`` points at an archive, not a page."""
        self._archives[url] = value

    def add_page_failure(self, url, level):
        """Accumulate ``level`` failure points against ``url``."""
        self._failures[url] = level + self._failures.get(url, 0)

    def add_page(self, urls, page):
        """Cache ``page`` under every URL in ``urls``."""
        for url in urls:
            self._pages[url] = page
|
|
3045 |
|
|
3046 |
class Link(object):
    """A download URL, optionally tagged with the page it was found on.

    Equality and hashing are by URL only, so the same URL discovered on
    different pages compares equal.
    """

    def __init__(self, url, comes_from=None):
        self.url = url
        self.comes_from = comes_from

    def __str__(self):
        if self.comes_from:
            return '%s (from %s)' % (self.url, self.comes_from)
        return self.url

    def __repr__(self):
        return '<Link %s>' % self

    def __eq__(self, other):
        return self.url == other.url

    def __hash__(self):
        return hash(self.url)

    @property
    def filename(self):
        """Filename implied by the URL: its last path segment, after
        stripping any fragment, query string, and trailing slash."""
        url = self.url.split('#', 1)[0]
        url = url.split('?', 1)[0]
        url = url.rstrip('/')
        name = posixpath.basename(url)
        assert name, (
            'URL %r produced no filename' % url)
        return name

    @property
    def scheme(self):
        """URL scheme, e.g. 'http' or 'svn+ssh'."""
        return urlparse.urlsplit(self.url)[0]

    @property
    def path(self):
        """URL path component."""
        return urlparse.urlsplit(self.url)[2]

    def splitext(self):
        """(base, extension) of the filename part of the URL path."""
        return splitext(posixpath.basename(self.path.rstrip('/')))

    _egg_fragment_re = re.compile(r'#egg=([^&]*)')

    @property
    def egg_fragment(self):
        """The project name from an ``#egg=`` fragment, or None."""
        egg_match = self._egg_fragment_re.search(self.url)
        if egg_match is None:
            return None
        return egg_match.group(1)

    _md5_re = re.compile(r'md5=([a-f0-9]+)')

    @property
    def md5_hash(self):
        """The hex digest from an ``md5=`` marker in the URL, or None."""
        md5_match = self._md5_re.search(self.url)
        if md5_match is None:
            return None
        return md5_match.group(1)

    @property
    def show_url(self):
        """Short display form: the URL's basename without fragment/query."""
        return posixpath.basename(self.url.split('#', 1)[0].split('?', 1)[0])
|
|
3110 |
|
|
3111 |
############################################################
|
|
3112 |
## Writing freeze files
|
|
3113 |
|
|
3114 |
|
|
3115 |
class FrozenRequirement(object):
|
|
3116 |
|
|
3117 |
def __init__(self, name, req, editable, comments=()):
|
|
3118 |
self.name = name
|
|
3119 |
self.req = req
|
|
3120 |
self.editable = editable
|
|
3121 |
self.comments = comments
|
|
3122 |
|
|
3123 |
    # Matches an svn revision suffix in a version string, e.g. "-r1234".
    _rev_re = re.compile(r'-r(\d+)$')
    # Matches a date-based dev suffix, e.g. "-20080101" (any 20xxxxxx date).
    _date_re = re.compile(r'-(20\d\d\d\d\d\d)$')
|
|
3125 |
|
|
3126 |
    @classmethod
    def from_dist(cls, dist, dependency_links, find_tags=False):
        """Build a FrozenRequirement from an installed distribution.

        VCS checkouts become editable requirements pointing at their
        repository; versions carrying an svn revision/date suffix are
        rewritten to ``svn+URL@rev#egg=name`` when an svn location can be
        found in ``dependency_links``.
        """
        location = os.path.normcase(os.path.abspath(dist.location))
        comments = []
        if vcs.get_backend_name(location):
            # Installed from a VCS checkout: freeze as an editable URL.
            editable = True
            req = get_src_requirement(dist, location, find_tags)
            if req is None:
                logger.warn('Could not determine repository location of %s' % location)
                comments.append('## !! Could not determine repository location')
                req = dist.as_requirement()
                editable = False
        else:
            editable = False
            req = dist.as_requirement()
            specs = req.specs
            assert len(specs) == 1 and specs[0][0] == '=='
            version = specs[0][1]
            ver_match = cls._rev_re.search(version)
            date_match = cls._date_re.search(version)
            if ver_match or date_match:
                # Version looks like an svn dev build; try to recover the
                # repository URL from the dependency links.
                svn_backend = vcs.get_backend('svn')
                if svn_backend:
                    svn_location = svn_backend(
                        ).get_location(dist, dependency_links)
                # NOTE(review): if no svn backend is registered,
                # svn_location is unbound here -- assumes the svn backend
                # is always available; verify.
                if not svn_location:
                    logger.warn(
                        'Warning: cannot find svn location for %s' % req)
                    comments.append('## FIXME: could not find svn URL in dependency_links for this package:')
                else:
                    comments.append('# Installing as editable to satisfy requirement %s:' % req)
                    if ver_match:
                        rev = ver_match.group(1)
                    else:
                        rev = '{%s}' % date_match.group(1)
                    editable = True
                    req = 'svn+%s@%s#egg=%s' % (svn_location, rev, cls.egg_name(dist))
        return cls(dist.project_name, req, editable, comments)
|
|
3164 |
|
|
3165 |
@staticmethod
|
|
3166 |
def egg_name(dist):
|
|
3167 |
name = dist.egg_name()
|
|
3168 |
match = re.search(r'-py\d\.\d$', name)
|
|
3169 |
if match:
|
|
3170 |
name = name[:match.start()]
|
|
3171 |
return name
|
|
3172 |
|
|
3173 |
def __str__(self):
|
|
3174 |
req = self.req
|
|
3175 |
if self.editable:
|
|
3176 |
req = '-e %s' % req
|
|
3177 |
return '\n'.join(list(self.comments)+[str(req)])+'\n'
|
|
3178 |
|
|
3179 |
class VersionControl(object):
    """Base class for version-control backends (svn/git/hg/bzr).

    Subclasses set ``name`` (the command name, e.g. 'svn') and ``dirname``
    (the metadata directory, e.g. '.svn'), plus scheme/bundle attributes,
    and implement the abstract operations below.
    """
    name = ''
    dirname = ''

    def __init__(self, url=None, *args, **kwargs):
        self.url = url
        # Cache for the resolved path of the VCS executable (see ``cmd``).
        self._cmd = None
        super(VersionControl, self).__init__(*args, **kwargs)

    def _filter(self, line):
        # Demote all subprocess output to INFO level.
        return (Logger.INFO, line)

    @property
    def cmd(self):
        """Full path of the VCS executable; resolved on first access and
        cached.  Raises BadCommand when the executable cannot be found."""
        if self._cmd is not None:
            return self._cmd
        command = find_command(self.name)
        if command is None:
            raise BadCommand('Cannot find command %s' % self.name)
        logger.info('Found command %s at %s' % (self.name, command))
        self._cmd = command
        return command

    def get_url_rev(self):
        """
        Returns the correct repository URL and revision by parsing the given
        repository URL
        """
        # Strip the 'vcs+' prefix, then split an optional '@rev' suffix off
        # the path component; the fragment is always discarded.
        url = self.url.split('+', 1)[1]
        scheme, netloc, path, query, frag = urlparse.urlsplit(url)
        rev = None
        if '@' in path:
            path, rev = path.rsplit('@', 1)
        url = urlparse.urlunsplit((scheme, netloc, path, query, ''))
        return url, rev

    def get_info(self, location):
        """
        Returns (url, revision), where both are strings
        """
        assert not location.rstrip('/').endswith(self.dirname), 'Bad directory: %s' % location
        return self.get_url(location), self.get_revision(location)

    def normalize_url(self, url):
        """
        Normalize a URL for comparison by unquoting it and removing any trailing slash.
        """
        return urllib.unquote(url).rstrip('/')

    def compare_urls(self, url1, url2):
        """
        Compare two repo URLs for identity, ignoring incidental differences.
        """
        return (self.normalize_url(url1) == self.normalize_url(url2))

    def parse_vcs_bundle_file(self, content):
        """
        Takes the contents of the bundled text file that explains how to revert
        the stripped off version control data of the given package and returns
        the URL and revision of it.
        """
        raise NotImplementedError

    def obtain(self, dest):
        """
        Called when installing or updating an editable package, takes the
        source path of the checkout.
        """
        raise NotImplementedError

    def switch(self, dest, url, rev_options):
        """
        Switch the repo at ``dest`` to point to ``URL``.
        """
        # Bug fix: this previously did ``raise NotImplemented`` --
        # NotImplemented is a plain constant, not an exception class, so
        # raising it fails with a TypeError instead of signalling an
        # unimplemented abstract method.
        raise NotImplementedError

    def update(self, dest, rev_options):
        """
        Update an already-existing repo to the given ``rev_options``.
        """
        raise NotImplementedError

    def check_destination(self, dest, url, rev_options, rev_display):
        """
        Prepare a location to receive a checkout/clone.

        Return True if the location is ready for (and requires) a
        checkout/clone, False otherwise.
        """
        checkout = True
        prompt = False
        if os.path.exists(dest):
            checkout = False
            if os.path.exists(os.path.join(dest, self.dirname)):
                existing_url = self.get_url(dest)
                if self.compare_urls(existing_url, url):
                    # Same repository already present: just update in place.
                    logger.info('%s in %s exists, and has correct URL (%s)'
                                % (self.repo_name.title(), display_path(dest), url))
                    logger.notify('Updating %s %s%s'
                                  % (display_path(dest), self.repo_name, rev_display))
                    self.update(dest, rev_options)
                else:
                    # Same VCS, different URL: ask the user what to do.
                    logger.warn('%s %s in %s exists with URL %s'
                                % (self.name, self.repo_name, display_path(dest), existing_url))
                    prompt = ('(s)witch, (i)gnore, (w)ipe, (b)ackup ', ('s', 'i', 'w', 'b'))
            else:
                # Destination exists but is not a checkout of this VCS.
                logger.warn('Directory %s already exists, and is not a %s %s.'
                            % (dest, self.name, self.repo_name))
                prompt = ('(i)gnore, (w)ipe, (b)ackup ', ('i', 'w', 'b'))
        if prompt:
            logger.warn('The plan is to install the %s repository %s'
                        % (self.name, url))
            response = ask('What to do? %s' % prompt[0], prompt[1])

            if response == 's':
                logger.notify('Switching %s %s to %s%s'
                              % (self.repo_name, display_path(dest), url, rev_display))
                self.switch(dest, url, rev_options)
            elif response == 'i':
                # do nothing
                pass
            elif response == 'w':
                logger.warn('Deleting %s' % display_path(dest))
                shutil.rmtree(dest)
                checkout = True
            elif response == 'b':
                dest_dir = backup_dir(dest)
                logger.warn('Backing up %s to %s'
                            % (display_path(dest), dest_dir))
                shutil.move(dest, dest_dir)
                checkout = True
        return checkout

    def unpack(self, location):
        """Check out/clone the repository at self.url into ``location``."""
        raise NotImplementedError

    def get_src_requirement(self, dist, location, find_tags=False):
        """Return an editable requirement string for the checkout at
        ``location``, or None if it cannot be determined."""
        raise NotImplementedError
|
|
3317 |
|
|
3318 |
# Patterns for scraping svn metadata: the first two match the XML flavour of
# .svn/entries files, the last two match `svn info` output.  All four are
# raw-string literals -- the originals mixed raw and plain strings, and
# '\d' inside a plain string is an invalid escape sequence (a
# DeprecationWarning on modern Pythons).
_svn_xml_url_re = re.compile(r'url="([^"]+)"')
_svn_rev_re = re.compile(r'committed-rev="(\d+)"')
_svn_url_re = re.compile(r'URL: (.+)')
_svn_revision_re = re.compile(r'Revision: (.+)')
|
|
3322 |
|
|
3323 |
class Subversion(VersionControl):
    """Version-control backend for Subversion checkouts (svn+... URLs)."""
    name = 'svn'
    dirname = '.svn'
    repo_name = 'checkout'
    schemes = ('svn', 'svn+ssh', 'svn+http', 'svn+https')
    bundle_file = 'svn-checkout.txt'
    guide = ('# This was an svn checkout; to make it a checkout again run:\n'
             'svn checkout --force -r %(rev)s %(url)s .\n')

    def get_info(self, location):
        """Returns (url, revision), where both are strings"""
        assert not location.rstrip('/').endswith(self.dirname), 'Bad directory: %s' % location
        # Force LANG=C so `svn info` output is parseable regardless of locale.
        output = call_subprocess(
            ['svn', 'info', location], show_stdout=False, extra_environ={'LANG': 'C'})
        match = _svn_url_re.search(output)
        if not match:
            logger.warn('Cannot determine URL of svn checkout %s' % display_path(location))
            logger.info('Output that cannot be parsed: \n%s' % output)
            return None, None
        url = match.group(1).strip()
        match = _svn_revision_re.search(output)
        if not match:
            logger.warn('Cannot determine revision of svn checkout %s' % display_path(location))
            logger.info('Output that cannot be parsed: \n%s' % output)
            return url, None
        return url, match.group(1)

    # NOTE(review): these two svn-info-based accessors are shadowed by the
    # .svn/entries-based get_revision/get_url defined further down in this
    # class body (later definitions win); as written they are dead code.
    def get_url(self, location):
        return self.get_info(location)[0]

    def get_revision(self, location):
        return self.get_info(location)[1]

    def parse_vcs_bundle_file(self, content):
        # Scan the guide file for the first non-comment line of the form
        # '-r <rev> <url>'; returns (url, rev) or (None, None).
        for line in content.splitlines():
            if not line.strip() or line.strip().startswith('#'):
                continue
            match = re.search(r'^-r\s*([^ ])?', line)
            if not match:
                return None, None
            rev = match.group(1)
            rest = line[match.end():].strip().split(None, 1)[0]
            return rest, rev
        return None, None

    def unpack(self, location):
        """Check out the svn repository at the url to the destination location"""
        url, rev = self.get_url_rev()
        logger.notify('Checking out svn repository %s to %s' % (url, location))
        logger.indent += 2
        try:
            if os.path.exists(location):
                # Subversion doesn't like to check out over an existing directory
                # --force fixes this, but was only added in svn 1.5
                shutil.rmtree(location, onerror=rmtree_errorhandler)
            call_subprocess(
                ['svn', 'checkout', url, location],
                filter_stdout=self._filter, show_stdout=False)
        finally:
            logger.indent -= 2

    def export(self, location):
        """Export the svn repository at the url to the destination location"""
        url, rev = self.get_url_rev()
        logger.notify('Checking out svn repository %s to %s' % (url, location))
        logger.indent += 2
        try:
            if os.path.exists(location):
                # Subversion doesn't like to check out over an existing directory
                # --force fixes this, but was only added in svn 1.5
                shutil.rmtree(location, onerror=rmtree_errorhandler)
            call_subprocess(
                ['svn', 'export', url, location],
                filter_stdout=self._filter, show_stdout=False)
        finally:
            logger.indent -= 2

    def switch(self, dest, url, rev_options):
        # Repoint the existing checkout at a new repository URL.
        call_subprocess(
            ['svn', 'switch'] + rev_options + [url, dest])

    def update(self, dest, rev_options):
        # Bring an existing checkout up to the requested revision.
        call_subprocess(
            ['svn', 'update'] + rev_options + [dest])

    def obtain(self, dest):
        # Check out self.url into ``dest``, prompting/updating via
        # check_destination when something is already there.
        url, rev = self.get_url_rev()
        if rev:
            rev_options = ['-r', rev]
            rev_display = ' (to revision %s)' % rev
        else:
            rev_options = []
            rev_display = ''
        if self.check_destination(dest, url, rev_options, rev_display):
            logger.notify('Checking out %s%s to %s'
                          % (url, rev_display, display_path(dest)))
            call_subprocess(
                ['svn', 'checkout', '-q'] + rev_options + [url, dest])

    def get_location(self, dist, dependency_links):
        # Find a dependency link whose '#egg=' fragment names ``dist`` and
        # return its URL (fragment stripped), or None.
        egg_fragment_re = re.compile(r'#egg=(.*)$')
        for url in dependency_links:
            egg_fragment = Link(url).egg_fragment
            if not egg_fragment:
                continue
            if '-' in egg_fragment:
                ## FIXME: will this work when a package has - in the name?
                key = '-'.join(egg_fragment.split('-')[:-1]).lower()
            else:
                key = egg_fragment
            if key == dist.key:
                return url.split('#', 1)[0]
        return None

    # NOTE(review): overrides the svn-info-based get_revision defined above.
    def get_revision(self, location):
        """
        Return the maximum revision for all files under a given location
        """
        # Note: taken from setuptools.command.egg_info
        revision = 0

        for base, dirs, files in os.walk(location):
            if self.dirname not in dirs:
                dirs[:] = []
                continue  # no sense walking uncontrolled subdirs
            dirs.remove(self.dirname)
            entries_fn = os.path.join(base, self.dirname, 'entries')
            if not os.path.exists(entries_fn):
                ## FIXME: should we warn?
                continue
            f = open(entries_fn)
            data = f.read()
            f.close()

            # svn >= 1.4 uses a plain-text entries file whose first field is
            # the format number (8, 9 or 10); older clients wrote XML.
            if data.startswith('8') or data.startswith('9') or data.startswith('10'):
                data = map(str.splitlines,data.split('\n\x0c\n'))
                del data[0][0]  # get rid of the '8'
                dirurl = data[0][3]
                revs = [int(d[9]) for d in data if len(d)>9 and d[9]]+[0]
                if revs:
                    localrev = max(revs)
                else:
                    localrev = 0
            elif data.startswith('<?xml'):
                dirurl = _svn_xml_url_re.search(data).group(1)    # get repository URL
                revs = [int(m.group(1)) for m in _svn_rev_re.finditer(data)]+[0]
                if revs:
                    localrev = max(revs)
                else:
                    localrev = 0
            else:
                logger.warn("Unrecognized .svn/entries format; skipping %s", base)
                dirs[:] = []
                continue
            if base == location:
                base_url = dirurl+'/'   # save the root url
            elif not dirurl.startswith(base_url):
                dirs[:] = []
                continue # not part of the same svn tree, skip it
            revision = max(revision, localrev)
        return revision

    # NOTE(review): overrides the svn-info-based get_url defined above.
    def get_url(self, location):
        # In cases where the source is in a subdirectory, not alongside setup.py
        # we have to look up in the location until we find a real setup.py
        orig_location = location
        while not os.path.exists(os.path.join(location, 'setup.py')):
            last_location = location
            location = os.path.dirname(location)
            if location == last_location:
                # We've traversed up to the root of the filesystem without finding setup.py
                logger.warn("Could not find setup.py for directory %s (tried all parent directories)"
                            % orig_location)
                return None
        f = open(os.path.join(location, self.dirname, 'entries'))
        data = f.read()
        f.close()
        # Same two on-disk formats as in get_revision above.
        if data.startswith('8') or data.startswith('9') or data.startswith('10'):
            data = map(str.splitlines,data.split('\n\x0c\n'))
            del data[0][0]  # get rid of the '8'
            return data[0][3]
        elif data.startswith('<?xml'):
            match = _svn_xml_url_re.search(data)
            if not match:
                raise ValueError('Badly formatted data: %r' % data)
            return match.group(1)    # get repository URL
        else:
            logger.warn("Unrecognized .svn/entries format in %s" % location)
            # Or raise exception?
            return None

    def get_tag_revs(self, svn_tag_url):
        # Parse `svn ls -v` output into a list of (tag_name, revision) pairs.
        stdout = call_subprocess(
            ['svn', 'ls', '-v', svn_tag_url], show_stdout=False)
        results = []
        for line in stdout.splitlines():
            parts = line.split()
            rev = int(parts[0])
            tag = parts[-1].strip('/')
            results.append((tag, rev))
        return results

    def find_tag_match(self, rev, tag_revs):
        # Return the tag whose revision is the smallest one greater than
        # ``rev`` (i.e. the first tag cut after this revision), or None.
        best_match_rev = None
        best_tag = None
        for tag, tag_rev in tag_revs:
            if (tag_rev > rev and
                (best_match_rev is None or best_match_rev > tag_rev)):
                # FIXME: Is best_match > tag_rev really possible?
                # or is it a sign something is wacky?
                best_match_rev = tag_rev
                best_tag = tag
        return best_tag

    def get_src_requirement(self, dist, location, find_tags=False):
        # Build an editable 'svn+URL@rev#egg=name' requirement, classifying
        # the URL as a tag, branch, or trunk checkout for the egg name.
        repo = self.get_url(location)
        if repo is None:
            return None
        parts = repo.split('/')
        ## FIXME: why not project name?
        egg_project_name = dist.egg_name().split('-', 1)[0]
        rev = self.get_revision(location)
        if parts[-2] in ('tags', 'tag'):
            # It's a tag, perfect!
            full_egg_name = '%s-%s' % (egg_project_name, parts[-1])
        elif parts[-2] in ('branches', 'branch'):
            # It's a branch :(
            full_egg_name = '%s-%s-r%s' % (dist.egg_name(), parts[-1], rev)
        elif parts[-1] == 'trunk':
            # Trunk :-/
            full_egg_name = '%s-dev_r%s' % (dist.egg_name(), rev)
            if find_tags:
                tag_url = '/'.join(parts[:-1]) + '/tags'
                tag_revs = self.get_tag_revs(tag_url)
                match = self.find_tag_match(rev, tag_revs)
                if match:
                    # NOTE(review): the format string has two %s specifiers
                    # but ``match`` is a single tag string -- this looks like
                    # it would raise TypeError when reached; confirm intent.
                    logger.notify('trunk checkout %s seems to be equivalent to tag %s' % match)
                    repo = '%s/%s' % (tag_url, match)
                    full_egg_name = '%s-%s' % (egg_project_name, match)
        else:
            # Don't know what it is
            logger.warn('svn URL does not fit normal structure (tags/branches/trunk): %s' % repo)
            full_egg_name = '%s-dev_r%s' % (egg_project_name, rev)
        return 'svn+%s@%s#egg=%s' % (repo, rev, full_egg_name)
|
|
3567 |
|
|
3568 |
# Make the Subversion backend discoverable through the global vcs registry.
vcs.register(Subversion)
|
|
3569 |
|
|
3570 |
|
|
3571 |
class Git(VersionControl):
    """Version-control backend for Git repositories (git+... URLs)."""
    name = 'git'
    dirname = '.git'
    repo_name = 'clone'
    schemes = ('git', 'git+http', 'git+ssh', 'git+git')
    bundle_file = 'git-clone.txt'
    guide = ('# This was a Git repo; to make it a repo again run:\n'
             'git init\ngit remote add origin %(url)s -f\ngit checkout %(rev)s\n')

    def parse_vcs_bundle_file(self, content):
        """Parse the bundled git-clone.txt guide; return (url, rev) or
        (None, None) when either piece cannot be found."""
        url = rev = None
        for line in content.splitlines():
            if not line.strip() or line.strip().startswith('#'):
                continue
            url_match = re.search(r'git\s*remote\s*add\s*origin(.*)\s*-f', line)
            if url_match:
                url = url_match.group(1).strip()
            rev_match = re.search(r'^git\s*checkout\s*-q\s*(.*)\s*', line)
            if rev_match:
                rev = rev_match.group(1).strip()
            if url and rev:
                return url, rev
        return None, None

    def unpack(self, location):
        """Clone the Git repository at the url to the destination location"""
        url, rev = self.get_url_rev()
        logger.notify('Cloning Git repository %s to %s' % (url, location))
        logger.indent += 2
        try:
            if os.path.exists(location):
                # NOTE(review): os.rmdir removes only *empty* directories; a
                # non-empty destination raises OSError here -- confirm intent.
                os.rmdir(location)
            call_subprocess(
                [self.cmd, 'clone', url, location],
                filter_stdout=self._filter, show_stdout=False)
        finally:
            logger.indent -= 2

    def export(self, location):
        """Export the Git repository at the url to the destination location"""
        temp_dir = tempfile.mkdtemp('-export', 'pip-')
        self.unpack(temp_dir)
        try:
            # checkout-index requires the --prefix to end with a slash.
            if not location.endswith('/'):
                location = location + '/'
            call_subprocess(
                [self.cmd, 'checkout-index', '-a', '-f', '--prefix', location],
                filter_stdout=self._filter, show_stdout=False, cwd=temp_dir)
        finally:
            shutil.rmtree(temp_dir)

    def check_rev_options(self, rev, dest, rev_options):
        """Check the revision options before checkout to compensate that tags
        and branches may need origin/ as a prefix"""
        if rev is None:
            # bail and use preset
            return rev_options
        revisions = self.get_tag_revs(dest)
        revisions.update(self.get_branch_revs(dest))
        if rev in revisions:
            # if rev is a sha
            return [rev]
        # Map name -> sha.  NOTE(review): if several tags/branches share one
        # sha, the inversion keeps only one of them.
        inverse_revisions = dict((v, k) for k, v in revisions.iteritems())
        if rev not in inverse_revisions:  # is rev a name or tag?
            origin_rev = 'origin/%s' % rev
            if origin_rev in inverse_revisions:
                rev = inverse_revisions[origin_rev]
            else:
                logger.warn("Could not find a tag or branch '%s', assuming commit." % rev)
        return [rev]

    def switch(self, dest, url, rev_options):
        """Repoint 'origin' at ``url`` and check out ``rev_options``."""
        call_subprocess(
            [self.cmd, 'config', 'remote.origin.url', url], cwd=dest)
        call_subprocess(
            [self.cmd, 'checkout', '-q'] + rev_options, cwd=dest)

    def update(self, dest, rev_options):
        """Fetch from origin, then force-checkout ``rev_options``."""
        call_subprocess([self.cmd, 'fetch', '-q'], cwd=dest)
        call_subprocess(
            [self.cmd, 'checkout', '-q', '-f'] + rev_options, cwd=dest)

    def obtain(self, dest):
        """Clone self.url into ``dest`` (or update/switch an existing clone
        via check_destination), then check out the requested revision."""
        url, rev = self.get_url_rev()
        if rev:
            rev_options = [rev]
            rev_display = ' (to %s)' % rev
        else:
            rev_options = ['origin/master']
            rev_display = ''
        if self.check_destination(dest, url, rev_options, rev_display):
            logger.notify('Cloning %s%s to %s' % (url, rev_display, display_path(dest)))
            call_subprocess(
                [self.cmd, 'clone', '-q', url, dest])
            rev_options = self.check_rev_options(rev, dest, rev_options)
            call_subprocess(
                [self.cmd, 'checkout', '-q'] + rev_options, cwd=dest)

    def get_url(self, location):
        """Return the URL of the 'origin' remote of the clone at ``location``."""
        url = call_subprocess(
            [self.cmd, 'config', 'remote.origin.url'],
            show_stdout=False, cwd=location)
        return url.strip()

    def get_revision(self, location):
        """Return the sha of HEAD for the clone at ``location``."""
        current_rev = call_subprocess(
            [self.cmd, 'rev-parse', 'HEAD'], show_stdout=False, cwd=location)
        return current_rev.strip()

    def get_tag_revs(self, location):
        """Return a dict mapping sha -> tag name for every tag in ``location``."""
        tags = call_subprocess(
            [self.cmd, 'tag'], show_stdout=False, cwd=location)
        tag_revs = []
        for line in tags.splitlines():
            tag = line.strip()
            rev = call_subprocess(
                [self.cmd, 'rev-parse', tag], show_stdout=False, cwd=location)
            tag_revs.append((rev.strip(), tag))
        tag_revs = dict(tag_revs)
        return tag_revs

    def get_branch_revs(self, location):
        """Return a dict mapping sha -> remote branch name for ``location``."""
        branches = call_subprocess(
            [self.cmd, 'branch', '-r'], show_stdout=False, cwd=location)
        branch_revs = []
        for line in branches.splitlines():
            # Ignore symbolic refs like 'origin/HEAD -> origin/master'.
            line = line.split('->')[0].strip()
            # Drop the '*' marker git places next to the current branch.
            branch = "".join([b for b in line.split() if b != '*'])
            rev = call_subprocess(
                [self.cmd, 'rev-parse', branch], show_stdout=False, cwd=location)
            branch_revs.append((rev.strip(), branch))
        branch_revs = dict(branch_revs)
        return branch_revs

    def get_src_requirement(self, dist, location, find_tags):
        """Return an editable 'git+URL@sha#egg=name' requirement for the
        clone at ``location``, or None when no origin URL is configured."""
        repo = self.get_url(location)
        # Bug fix: test for an empty URL *before* prepending 'git+'; the
        # original checked afterwards, when the string was already non-empty,
        # so the guard could never trigger.
        if not repo:
            return None
        if not repo.lower().startswith('git:'):
            repo = 'git+' + repo
        egg_project_name = dist.egg_name().split('-', 1)[0]
        current_rev = self.get_revision(location)
        tag_revs = self.get_tag_revs(location)
        branch_revs = self.get_branch_revs(location)

        if current_rev in tag_revs:
            # It's a tag
            full_egg_name = '%s-%s' % (egg_project_name, tag_revs[current_rev])
        elif (current_rev in branch_revs and
              branch_revs[current_rev] != 'origin/master'):
            # It's the head of a branch
            full_egg_name = '%s-%s' % (dist.egg_name(),
                                       branch_revs[current_rev].replace('origin/', ''))
        else:
            full_egg_name = '%s-dev' % dist.egg_name()

        return '%s@%s#egg=%s' % (repo, current_rev, full_egg_name)

    def get_url_rev(self):
        """
        Prefixes stub URLs like 'user@hostname:user/repo.git' with 'ssh://'.
        That's required because although they use SSH they sometimes doesn't
        work with a ssh:// scheme (e.g. Github). But we need a scheme for
        parsing. Hence we remove it again afterwards and return it as a stub.
        """
        if not '://' in self.url:
            self.url = self.url.replace('git+', 'git+ssh://')
            url, rev = super(Git, self).get_url_rev()
            url = url.replace('ssh://', '')
            return url, rev
        return super(Git, self).get_url_rev()
|
|
3743 |
|
|
3744 |
# Make the Git backend discoverable through the global vcs registry.
vcs.register(Git)
|
|
3745 |
|
|
3746 |
class Mercurial(VersionControl):
|
|
3747 |
name = 'hg'
|
|
3748 |
dirname = '.hg'
|
|
3749 |
repo_name = 'clone'
|
|
3750 |
schemes = ('hg', 'hg+http', 'hg+https', 'hg+ssh', 'hg+static-http')
|
|
3751 |
bundle_file = 'hg-clone.txt'
|
|
3752 |
guide = ('# This was a Mercurial repo; to make it a repo again run:\n'
|
|
3753 |
'hg init\nhg pull %(url)s\nhg update -r %(rev)s\n')
|
|
3754 |
|
|
3755 |
def parse_vcs_bundle_file(self, content):
|
|
3756 |
url = rev = None
|
|
3757 |
for line in content.splitlines():
|
|
3758 |
if not line.strip() or line.strip().startswith('#'):
|
|
3759 |
continue
|
|
3760 |
url_match = re.search(r'hg\s*pull\s*(.*)\s*', line)
|
|
3761 |
if url_match:
|
|
3762 |
url = url_match.group(1).strip()
|
|
3763 |
rev_match = re.search(r'^hg\s*update\s*-r\s*(.*)\s*', line)
|
|
3764 |
if rev_match:
|
|
3765 |
rev = rev_match.group(1).strip()
|
|
3766 |
if url and rev:
|
|
3767 |
return url, rev
|
|
3768 |
return None, None
|
|
3769 |
|
|
3770 |
def unpack(self, location):
|
|
3771 |
"""Clone the Hg repository at the url to the destination location"""
|
|
3772 |
url, rev = self.get_url_rev()
|
|
3773 |
logger.notify('Cloning Mercurial repository %s to %s' % (url, location))
|
|
3774 |
logger.indent += 2
|
|
3775 |
try:
|
|
3776 |
if os.path.exists(location):
|
|
3777 |
os.rmdir(location)
|
|
3778 |
call_subprocess(
|
|
3779 |
['hg', 'clone', url, location],
|
|
3780 |
filter_stdout=self._filter, show_stdout=False)
|
|
3781 |
finally:
|
|
3782 |
logger.indent -= 2
|
|
3783 |
|
|
3784 |
def export(self, location):
|
|
3785 |
"""Export the Hg repository at the url to the destination location"""
|
|
3786 |
temp_dir = tempfile.mkdtemp('-export', 'pip-')
|
|
3787 |
self.unpack(temp_dir)
|
|
3788 |
try:
|
|
3789 |
call_subprocess(
|
|
3790 |
['hg', 'archive', location],
|
|
3791 |
filter_stdout=self._filter, show_stdout=False, cwd=temp_dir)
|
|
3792 |
finally:
|
|
3793 |
shutil.rmtree(temp_dir)
|
|
3794 |
|
|
3795 |
def switch(self, dest, url, rev_options):
|
|
3796 |
repo_config = os.path.join(dest, self.dirname, 'hgrc')
|
|
3797 |
config = ConfigParser.SafeConfigParser()
|
|
3798 |
try:
|
|
3799 |
config.read(repo_config)
|
|
3800 |
config.set('paths', 'default', url)
|
|
3801 |
config_file = open(repo_config, 'w')
|
|
3802 |
config.write(config_file)
|
|
3803 |
config_file.close()
|
|
3804 |
except (OSError, ConfigParser.NoSectionError), e:
|
|
3805 |
logger.warn(
|
|
3806 |
'Could not switch Mercurial repository to %s: %s'
|
|
3807 |
% (url, e))
|
|
3808 |
else:
|
|
3809 |
call_subprocess(['hg', 'update', '-q'] + rev_options, cwd=dest)
|
|
3810 |
|
|
3811 |
def update(self, dest, rev_options):
|
|
3812 |
call_subprocess(['hg', 'pull', '-q'], cwd=dest)
|
|
3813 |
call_subprocess(
|
|
3814 |
['hg', 'update', '-q'] + rev_options, cwd=dest)
|
|
3815 |
|
|
3816 |
def obtain(self, dest):
|
|
3817 |
url, rev = self.get_url_rev()
|
|
3818 |
if rev:
|
|
3819 |
rev_options = [rev]
|
|
3820 |
rev_display = ' (to revision %s)' % rev
|
|
3821 |
else:
|
|
3822 |
rev_options = []
|
|
3823 |
rev_display = ''
|
|
3824 |
if self.check_destination(dest, url, rev_options, rev_display):
|
|
3825 |
logger.notify('Cloning hg %s%s to %s'
|
|
3826 |
% (url, rev_display, display_path(dest)))
|
|
3827 |
call_subprocess(['hg', 'clone', '-q', url, dest])
|
|
3828 |
call_subprocess(['hg', 'update', '-q'] + rev_options, cwd=dest)
|
|
3829 |
|
|
3830 |
def get_url(self, location):
|
|
3831 |
url = call_subprocess(
|
|
3832 |
['hg', 'showconfig', 'paths.default'],
|
|
3833 |
show_stdout=False, cwd=location).strip()
|
|
3834 |
if url.startswith('/') or url.startswith('\\'):
|
|
3835 |
url = filename_to_url(url)
|
|
3836 |
return url.strip()
|
|
3837 |
|
|
3838 |
def get_tag_revs(self, location):
|
|
3839 |
tags = call_subprocess(
|
|
3840 |
['hg', 'tags'], show_stdout=False, cwd=location)
|
|
3841 |
tag_revs = []
|
|
3842 |
for line in tags.splitlines():
|
|
3843 |
tags_match = re.search(r'([\w\d\.-]+)\s*([\d]+):.*$', line)
|
|
3844 |
if tags_match:
|
|
3845 |
tag = tags_match.group(1)
|
|
3846 |
rev = tags_match.group(2)
|
|
3847 |
tag_revs.append((rev.strip(), tag.strip()))
|
|
3848 |
return dict(tag_revs)
|
|
3849 |
|
|
3850 |
def get_branch_revs(self, location):
|
|
3851 |
branches = call_subprocess(
|
|
3852 |
['hg', 'branches'], show_stdout=False, cwd=location)
|
|
3853 |
branch_revs = []
|
|
3854 |
for line in branches.splitlines():
|
|
3855 |
branches_match = re.search(r'([\w\d\.-]+)\s*([\d]+):.*$', line)
|
|
3856 |
if branches_match:
|
|
3857 |
branch = branches_match.group(1)
|
|
3858 |
rev = branches_match.group(2)
|
|
3859 |
branch_revs.append((rev.strip(), branch.strip()))
|
|
3860 |
return dict(branch_revs)
|
|
3861 |
|
|
3862 |
def get_revision(self, location):
|
|
3863 |
current_revision = call_subprocess(
|
|
3864 |
['hg', 'parents', '--template={rev}'],
|
|
3865 |
show_stdout=False, cwd=location).strip()
|
|
3866 |
return current_revision
|
|
3867 |
|
|
3868 |
def get_revision_hash(self, location):
|
|
3869 |
current_rev_hash = call_subprocess(
|
|
3870 |
['hg', 'parents', '--template={node}'],
|
|
3871 |
show_stdout=False, cwd=location).strip()
|
|
3872 |
return current_rev_hash
|
|
3873 |
|
|
3874 |
def get_src_requirement(self, dist, location, find_tags):
|
|
3875 |
repo = self.get_url(location)
|
|
3876 |
if not repo.lower().startswith('hg:'):
|
|
3877 |
repo = 'hg+' + repo
|
|
3878 |
egg_project_name = dist.egg_name().split('-', 1)[0]
|
|
3879 |
if not repo:
|
|
3880 |
return None
|
|
3881 |
current_rev = self.get_revision(location)
|
|
3882 |
current_rev_hash = self.get_revision_hash(location)
|
|
3883 |
tag_revs = self.get_tag_revs(location)
|
|
3884 |
branch_revs = self.get_branch_revs(location)
|
|
3885 |
if current_rev in tag_revs:
|
|
3886 |
# It's a tag
|
|
3887 |
full_egg_name = '%s-%s' % (egg_project_name, tag_revs[current_rev])
|
|
3888 |
elif current_rev in branch_revs:
|
|
3889 |
# It's the tip of a branch
|
|
3890 |
full_egg_name = '%s-%s' % (dist.egg_name(), branch_revs[current_rev])
|
|
3891 |
else:
|
|
3892 |
full_egg_name = '%s-dev' % dist.egg_name()
|
|
3893 |
return '%s@%s#egg=%s' % (repo, current_rev_hash, full_egg_name)
|
|
3894 |
|
|
3895 |
# Make the Mercurial backend discoverable through the global vcs registry.
vcs.register(Mercurial)
|
|
3896 |
|
|
3897 |
|
|
3898 |
class Bazaar(VersionControl):
    """VCS backend for editable requirements kept in Bazaar branches."""

    name = 'bzr'
    dirname = '.bzr'
    repo_name = 'branch'
    bundle_file = 'bzr-branch.txt'
    schemes = ('bzr', 'bzr+http', 'bzr+https', 'bzr+ssh', 'bzr+sftp', 'bzr+ftp')
    guide = ('# This was a Bazaar branch; to make it a branch again run:\n'
             'bzr branch -r %(rev)s %(url)s .\n')

    def parse_vcs_bundle_file(self, content):
        """Extract (url, rev) from a bzr-branch.txt bundle file.

        Returns (None, None) when no usable ``bzr branch -r REV URL`` line
        is found.
        """
        url = rev = None
        for line in content.splitlines():
            if not line.strip() or line.strip().startswith('#'):
                continue
            match = re.search(r'^bzr\s*branch\s*-r\s*(\d*)', line)
            if match:
                rev = match.group(1).strip()
                url = line[match.end():].strip().split(None, 1)[0]
            if url and rev:
                return url, rev
        return None, None

    def unpack(self, location):
        """Get the bzr branch at the url to the destination location"""
        url, rev = self.get_url_rev()
        logger.notify('Checking out bzr repository %s to %s' % (url, location))
        logger.indent += 2
        try:
            if os.path.exists(location):
                os.rmdir(location)
            call_subprocess(
                [self.cmd, 'branch', url, location],
                filter_stdout=self._filter, show_stdout=False)
        finally:
            logger.indent -= 2

    def export(self, location):
        """Export the Bazaar repository at the url to the destination location"""
        temp_dir = tempfile.mkdtemp('-export', 'pip-')
        self.unpack(temp_dir)
        if os.path.exists(location):
            # Remove the location to make sure Bazaar can export it correctly
            shutil.rmtree(location, onerror=rmtree_errorhandler)
        try:
            call_subprocess([self.cmd, 'export', location], cwd=temp_dir,
                            filter_stdout=self._filter, show_stdout=False)
        finally:
            shutil.rmtree(temp_dir)

    def switch(self, dest, url, rev_options):
        # NOTE(review): rev_options is accepted but not passed to 'bzr
        # switch' -- preserved as-is; confirm whether that is intentional.
        call_subprocess([self.cmd, 'switch', url], cwd=dest)

    def update(self, dest, rev_options):
        call_subprocess(
            [self.cmd, 'pull', '-q'] + rev_options, cwd=dest)

    def obtain(self, dest):
        """Branch the configured URL into *dest* (optionally at a revision)."""
        url, rev = self.get_url_rev()
        if rev:
            rev_options = ['-r', rev]
            rev_display = ' (to revision %s)' % rev
        else:
            rev_options = []
            rev_display = ''
        if self.check_destination(dest, url, rev_options, rev_display):
            logger.notify('Checking out %s%s to %s'
                          % (url, rev_display, display_path(dest)))
            call_subprocess(
                [self.cmd, 'branch', '-q'] + rev_options + [url, dest])

    def get_url_rev(self):
        # hotfix the URL scheme after removing bzr+ from bzr+ssh:// readd it
        url, rev = super(Bazaar, self).get_url_rev()
        if url.startswith('ssh://'):
            url = 'bzr+' + url
        return url, rev

    def get_url(self, location):
        """Return the branch URL reported by 'bzr info', or None."""
        urls = call_subprocess(
            [self.cmd, 'info'], show_stdout=False, cwd=location)
        for line in urls.splitlines():
            line = line.strip()
            for x in ('checkout of branch: ',
                      'parent branch: '):
                if line.startswith(x):
                    return line.split(x)[1]
        return None

    def get_revision(self, location):
        """Return the current revision number ('bzr revno')."""
        revision = call_subprocess(
            [self.cmd, 'revno'], show_stdout=False, cwd=location)
        return revision.splitlines()[-1]

    def get_tag_revs(self, location):
        """Return a {revision: tag} mapping parsed from 'bzr tags'."""
        tags = call_subprocess(
            [self.cmd, 'tags'], show_stdout=False, cwd=location)
        tag_revs = []
        for line in tags.splitlines():
            tags_match = re.search(r'([.\w-]+)\s*(.*)$', line)
            if tags_match:
                tag = tags_match.group(1)
                rev = tags_match.group(2)
                tag_revs.append((rev.strip(), tag.strip()))
        return dict(tag_revs)

    def get_src_requirement(self, dist, location, find_tags):
        """Build a ``bzr+URL@rev#egg=name`` requirement string for the
        editable Bazaar branch at *location*, or None without a URL."""
        repo = self.get_url(location)
        # BUG FIX: get_url() may return None; check before calling .lower()
        # on it (the original order could raise AttributeError).
        if not repo:
            return None
        if not repo.lower().startswith('bzr:'):
            repo = 'bzr+' + repo
        egg_project_name = dist.egg_name().split('-', 1)[0]
        current_rev = self.get_revision(location)
        tag_revs = self.get_tag_revs(location)

        if current_rev in tag_revs:
            # It's a tag (the unused 'tag' local from the original is gone)
            full_egg_name = '%s-%s' % (egg_project_name, tag_revs[current_rev])
        else:
            full_egg_name = '%s-dev_r%s' % (dist.egg_name(), current_rev)
        return '%s@%s#egg=%s' % (repo, current_rev, full_egg_name)
|
|
4020 |
|
|
4021 |
# Make the Bazaar backend available through the global vcs registry.
vcs.register(Bazaar)
|
|
4022 |
|
|
4023 |
def get_src_requirement(dist, location, find_tags):
    """Delegate to the VCS backend owning *location*; when none matches,
    warn and fall back to the distribution's own requirement string."""
    backend = vcs.get_backend_from_location(location)
    if backend is not None:
        return backend().get_src_requirement(dist, location, find_tags)
    logger.warn('cannot determine version of editable source in %s (is not SVN checkout, Git clone, Mercurial clone or Bazaar branch)' % location)
    return dist.as_requirement()
|
|
4029 |
|
|
4030 |
############################################################
|
|
4031 |
## Requirement files
|
|
4032 |
|
|
4033 |
# Matches requirement-file locations given as http/https/file URLs.
_scheme_re = re.compile(r'^(http|https|file):', re.I)
# Matches a Windows drive spec written as e.g. "c|" inside a file: URL path.
_url_slash_drive_re = re.compile(r'/*([a-z])\|', re.I)
|
|
4035 |
def get_file_content(url, comes_from=None):
    """Gets the content of a file; it may be a filename, file: URL, or
    http: URL.  Returns (location, content)

    Raises InstallationError when a remote requirements file references a
    local file: URL (a local override would be silently ignored otherwise).
    """
    match = _scheme_re.search(url)
    if match:
        scheme = match.group(1).lower()
        if (scheme == 'file' and comes_from
            and comes_from.startswith('http')):
            raise InstallationError(
                'Requirements file %s references URL %s, which is local'
                % (comes_from, url))
        if scheme == 'file':
            # Turn the file: URL into a plain filesystem path.
            path = url.split(':', 1)[1]
            path = path.replace('\\', '/')
            match = _url_slash_drive_re.match(path)
            if match:
                # Windows drive spec: 'c|' back to 'c:'
                path = match.group(1) + ':' + path.split('|', 1)[1]
            path = urllib.unquote(path)
            if path.startswith('/'):
                path = '/' + path.lstrip('/')
            url = path
        else:
            ## FIXME: catch some errors
            resp = urllib2.urlopen(url)
            return resp.geturl(), resp.read()
    f = open(url)
    try:
        # BUG FIX: close the handle even when read() raises; the original
        # leaked the open file on error.
        content = f.read()
    finally:
        f.close()
    return url, content
|
|
4064 |
|
|
4065 |
def parse_requirements(filename, finder=None, comes_from=None, options=None):
    """Yield InstallRequirement objects parsed from a requirements file.

    *filename* may be a local path or an http(s)/file URL.  Nested
    requirement files (-r/--requirement) are parsed recursively; index and
    find-links lines are applied to *finder* when one is given.
    """
    skip_match = None
    # 'options' defaults to None, so do not assume it is present.
    skip_regex = options.skip_requirements_regex if options else None
    if skip_regex:
        skip_match = re.compile(skip_regex)
    filename, content = get_file_content(filename, comes_from=comes_from)
    for line_number, line in enumerate(content.splitlines()):
        line_number += 1
        line = line.strip()
        if not line or line.startswith('#'):
            continue
        if skip_match and skip_match.search(line):
            continue
        if line.startswith('-r') or line.startswith('--requirement'):
            if line.startswith('-r'):
                req_url = line[2:].strip()
            else:
                req_url = line[len('--requirement'):].strip().strip('=')
            if _scheme_re.search(filename):
                # Relative to a URL.  BUG FIX: the original referenced an
                # undefined name 'url' here, raising NameError.
                req_url = urlparse.urljoin(filename, req_url)
            elif not _scheme_re.search(req_url):
                req_url = os.path.join(os.path.dirname(filename), req_url)
            for item in parse_requirements(req_url, finder, comes_from=filename, options=options):
                yield item
        elif line.startswith('-Z') or line.startswith('--always-unzip'):
            # No longer used, but previously these were used in
            # requirement files, so we'll ignore.
            pass
        elif finder and (line.startswith('-f') or line.startswith('--find-links')):
            # BUG FIX: parentheses added.  The original's
            # 'finder and A or B' entered this branch for --find-links even
            # when finder was None, then crashed on finder.find_links.
            if line.startswith('-f'):
                line = line[2:].strip()
            else:
                line = line[len('--find-links'):].strip().lstrip('=')
            ## FIXME: it would be nice to keep track of the source of
            ## the find_links:
            finder.find_links.append(line)
        elif line.startswith('-i') or line.startswith('--index-url'):
            if line.startswith('-i'):
                line = line[2:].strip()
            else:
                line = line[len('--index-url'):].strip().lstrip('=')
            finder.index_urls = [line]
        elif line.startswith('--extra-index-url'):
            line = line[len('--extra-index-url'):].strip().lstrip('=')
            finder.index_urls.append(line)
        else:
            comes_from = '-r %s (line %s)' % (filename, line_number)
            if line.startswith('-e') or line.startswith('--editable'):
                if line.startswith('-e'):
                    line = line[2:].strip()
                else:
                    line = line[len('--editable'):].strip()
                req = InstallRequirement.from_editable(
                    line, comes_from=comes_from,
                    default_vcs=options.default_vcs if options else None)
            else:
                req = InstallRequirement.from_line(line, comes_from)
            yield req
|
|
4123 |
|
|
4124 |
############################################################
|
|
4125 |
## Logging
|
|
4126 |
|
|
4127 |
|
|
4128 |
|
|
4129 |
class Logger(object):

    """
    Logging object for use in command-line script.  Allows ranges of
    levels, to avoid some redundancy of displayed information.
    """

    VERBOSE_DEBUG = logging.DEBUG-1
    DEBUG = logging.DEBUG
    INFO = logging.INFO
    # A level halfway between INFO and WARN.
    NOTIFY = (logging.INFO+logging.WARN)/2
    WARN = WARNING = logging.WARN
    ERROR = logging.ERROR
    FATAL = logging.FATAL

    LEVELS = [VERBOSE_DEBUG, DEBUG, INFO, NOTIFY, WARN, ERROR, FATAL]

    def __init__(self, consumers):
        # consumers: list of (level, consumer) pairs; a consumer is either
        # a writable file-like object or a callable taking a rendered line.
        self.consumers = consumers
        self.indent = 0
        self.explicit_levels = False
        # Progress state: message of the active progress scope, and whether
        # a partial line is currently hanging on stdout.
        self.in_progress = None
        self.in_progress_hanging = False

    def debug(self, msg, *args, **kw):
        self.log(self.DEBUG, msg, *args, **kw)

    def info(self, msg, *args, **kw):
        self.log(self.INFO, msg, *args, **kw)

    def notify(self, msg, *args, **kw):
        self.log(self.NOTIFY, msg, *args, **kw)

    def warn(self, msg, *args, **kw):
        self.log(self.WARN, msg, *args, **kw)

    def error(self, msg, *args, **kw):
        # BUG FIX: previously logged at WARN level; errors now use ERROR.
        self.log(self.ERROR, msg, *args, **kw)

    def fatal(self, msg, *args, **kw):
        self.log(self.FATAL, msg, *args, **kw)

    def log(self, level, msg, *args, **kw):
        """Render *msg* (with % args) and send it to every consumer whose
        configured level accepts *level*."""
        if args:
            if kw:
                raise TypeError(
                    "You may give positional or keyword arguments, not both")
        args = args or kw
        rendered = None
        for consumer_level, consumer in self.consumers:
            if self.level_matches(level, consumer_level):
                if (self.in_progress_hanging
                    and consumer in (sys.stdout, sys.stderr)):
                    # Terminate the hanging progress line before logging.
                    self.in_progress_hanging = False
                    sys.stdout.write('\n')
                    sys.stdout.flush()
                if rendered is None:
                    if args:
                        rendered = msg % args
                    else:
                        rendered = msg
                    rendered = ' '*self.indent + rendered
                    if self.explicit_levels:
                        ## FIXME: should this be a name, not a level number?
                        rendered = '%02i %s' % (level, rendered)
                if hasattr(consumer, 'write'):
                    consumer.write(rendered+'\n')
                else:
                    consumer(rendered)

    def start_progress(self, msg):
        """Open a progress scope; dots/messages follow via show_progress()."""
        assert not self.in_progress, (
            "Tried to start_progress(%r) while in_progress %r"
            % (msg, self.in_progress))
        if self.level_matches(self.NOTIFY, self._stdout_level()):
            sys.stdout.write(' '*self.indent + msg)
            sys.stdout.flush()
            self.in_progress_hanging = True
        else:
            self.in_progress_hanging = False
        self.in_progress = msg
        self.last_message = None

    def end_progress(self, msg='done.'):
        """Close the progress scope opened by start_progress()."""
        assert self.in_progress, (
            "Tried to end_progress without start_progress")
        if self.stdout_level_matches(self.NOTIFY):
            if not self.in_progress_hanging:
                # Some message has been printed out since start_progress
                sys.stdout.write('...' + self.in_progress + msg + '\n')
                sys.stdout.flush()
            else:
                # These erase any messages shown with show_progress (besides .'s)
                logger.show_progress('')
                logger.show_progress('')
                sys.stdout.write(msg + '\n')
                sys.stdout.flush()
        self.in_progress = None
        self.in_progress_hanging = False

    def show_progress(self, message=None):
        """If we are in a progress scope, and no log messages have been
        shown, write out another '.'"""
        if self.in_progress_hanging:
            if message is None:
                sys.stdout.write('.')
                sys.stdout.flush()
            else:
                # Pad with spaces so a shorter message fully overwrites the
                # previous one after the carriage return.
                if self.last_message:
                    padding = ' ' * max(0, len(self.last_message)-len(message))
                else:
                    padding = ''
                sys.stdout.write('\r%s%s%s%s' % (' '*self.indent, self.in_progress, message, padding))
                sys.stdout.flush()
                self.last_message = message

    def stdout_level_matches(self, level):
        """Returns true if a message at this level will go to stdout"""
        return self.level_matches(level, self._stdout_level())

    def _stdout_level(self):
        """Returns the level that stdout runs at"""
        for level, consumer in self.consumers:
            if consumer is sys.stdout:
                return level
        return self.FATAL

    def level_matches(self, level, consumer_level):
        """
        >>> l = Logger([])
        >>> l.level_matches(3, 4)
        False
        >>> l.level_matches(3, 2)
        True
        >>> l.level_matches(slice(None, 3), 3)
        False
        >>> l.level_matches(slice(None, 3), 2)
        True
        >>> l.level_matches(slice(1, 3), 1)
        True
        >>> l.level_matches(slice(2, 3), 1)
        False
        """
        if isinstance(level, slice):
            start, stop = level.start, level.stop
            if start is not None and start > consumer_level:
                return False
            # BUG FIX: the original used 'or' here, which rejected every
            # bounded slice and contradicted the doctests above.
            if stop is not None and stop <= consumer_level:
                return False
            return True
        else:
            return level >= consumer_level

    @classmethod
    def level_for_integer(cls, level):
        """Map an integer (e.g. a verbosity count) onto LEVELS, clamped."""
        levels = cls.LEVELS
        if level < 0:
            return levels[0]
        if level >= len(levels):
            return levels[-1]
        return levels[level]

    def move_stdout_to_stderr(self):
        """Re-point every stdout consumer at stderr (keeping its level)."""
        to_remove = []
        to_add = []
        for consumer_level, consumer in self.consumers:
            if consumer == sys.stdout:
                to_remove.append((consumer_level, consumer))
                to_add.append((consumer_level, sys.stderr))
        for item in to_remove:
            self.consumers.remove(item)
        self.consumers.extend(to_add)
|
|
4295 |
|
|
4296 |
|
|
4297 |
def call_subprocess(cmd, show_stdout=True,
                    filter_stdout=None, cwd=None,
                    raise_on_returncode=True,
                    command_level=Logger.DEBUG, command_desc=None,
                    extra_environ=None):
    """Run *cmd* (a list of arguments), routing its output through the
    global logger.

    When show_stdout is true the child inherits our stdout; otherwise its
    output is captured, optionally filtered line-by-line through
    filter_stdout (which may return a level or a (level, line) tuple), and
    the joined captured text is returned.  Raises InstallationError on a
    nonzero exit code unless raise_on_returncode is False.
    """
    if command_desc is None:
        # Build a shell-like description of the command for log messages.
        cmd_parts = []
        for part in cmd:
            if ' ' in part or '\n' in part or '"' in part or "'" in part:
                part = '"%s"' % part.replace('"', '\\"')
            cmd_parts.append(part)
        command_desc = ' '.join(cmd_parts)
    if show_stdout:
        stdout = None
    else:
        stdout = subprocess.PIPE
    logger.log(command_level, "Running command %s" % command_desc)
    env = os.environ.copy()
    if extra_environ:
        env.update(extra_environ)
    try:
        proc = subprocess.Popen(
            cmd, stderr=subprocess.STDOUT, stdin=None, stdout=stdout,
            cwd=cwd, env=env)
    except Exception, e:
        logger.fatal(
            "Error %s while executing command %s" % (e, command_desc))
        raise
    all_output = []
    if stdout is not None:
        # Captured mode: stream the child's merged stdout/stderr line by
        # line so progress can be displayed while it runs.
        stdout = proc.stdout
        while 1:
            line = stdout.readline()
            if not line:
                break
            line = line.rstrip()
            all_output.append(line + '\n')
            if filter_stdout:
                level = filter_stdout(line)
                if isinstance(level, tuple):
                    # Filter may rewrite the line as well as pick a level.
                    level, line = level
                logger.log(level, line)
                if not logger.stdout_level_matches(level):
                    logger.show_progress()
            else:
                logger.info(line)
    else:
        returned_stdout, returned_stderr = proc.communicate()
        all_output = [returned_stdout or '']
    proc.wait()
    if proc.returncode:
        if raise_on_returncode:
            if all_output:
                # Dump everything we captured so the failure is diagnosable.
                logger.notify('Complete output from command %s:' % command_desc)
                logger.notify('\n'.join(all_output) + '\n----------------------------------------')
            raise InstallationError(
                "Command %s failed with error code %s"
                % (command_desc, proc.returncode))
        else:
            logger.warn(
                "Command %s had error code %s"
                % (command_desc, proc.returncode))
    if stdout is not None:
        return ''.join(all_output)
|
|
4361 |
|
|
4362 |
############################################################
|
|
4363 |
## Utility functions
|
|
4364 |
|
|
4365 |
def is_svn_page(html):
    """Heuristically detect the HTML index page of a Subversion
    repository (a 'Revision N:' title plus a 'Powered by Subversion'
    footer)."""
    title = re.search(r'<title>[^<]*Revision \d+:', html)
    powered = re.search(r'Powered by (?:<a[^>]*?>)?Subversion', html, re.I)
    return title and powered
|
|
4369 |
|
|
4370 |
def file_contents(filename):
    """Return the raw (binary-mode) contents of *filename*."""
    fp = open(filename, 'rb')
    try:
        data = fp.read()
    finally:
        fp.close()
    return data
|
|
4376 |
|
|
4377 |
def split_leading_dir(path):
    """Split off the first path component of *path*.

    Returns (first_component, remainder); handles both '/' and '\\'
    separators, splitting on whichever occurs first.
    """
    path = str(path).lstrip('/').lstrip('\\')
    has_slash = '/' in path
    has_backslash = '\\' in path
    if has_slash and (not has_backslash or path.find('/') < path.find('\\')):
        return path.split('/', 1)
    if has_backslash:
        return path.split('\\', 1)
    return path, ''
|
|
4387 |
|
|
4388 |
def has_leading_dir(paths):
    """Returns true if all the paths have the same leading path name
    (i.e., everything is in one subdirectory in an archive)"""
    common = None
    for path in paths:
        prefix, _rest = split_leading_dir(path)
        if not prefix:
            return False
        if common is None:
            common = prefix
        elif prefix != common:
            return False
    return True
|
|
4401 |
|
|
4402 |
def format_size(bytes):
    """Render a byte count as a short human-readable Mb/Kb/bytes string."""
    if bytes > 1000 * 1000:
        return '%.1fMb' % (bytes / 1000.0 / 1000)
    if bytes > 10 * 1000:
        return '%iKb' % (bytes / 1000)
    if bytes > 1000:
        return '%.1fKb' % (bytes / 1000.0)
    return '%ibytes' % bytes
|
|
4411 |
|
|
4412 |
# Any character outside [a-zA-Z] is treated as a separator.
_normalize_re = re.compile(r'[^a-z]', re.I)


def normalize_name(name):
    """Normalize a project name: lower-case it and replace every
    non-letter character with a dash."""
    lowered = name.lower()
    return _normalize_re.sub('-', lowered)
|
|
4416 |
|
|
4417 |
def make_path_relative(path, rel_to):
    """Rewrite the file *path* as a path relative to the directory
    *rel_to*.

    >>> make_path_relative('/usr/share/something/a-file.pth',
    ...                    '/usr/share/another-place/src/Directory')
    '../../../something/a-file.pth'
    >>> make_path_relative('/usr/share/a-file.pth', '/usr/share/')
    'a-file.pth'
    """
    basename = os.path.basename(path)
    dirname = os.path.normpath(os.path.abspath(os.path.dirname(path)))
    rel_to = os.path.normpath(os.path.abspath(rel_to))
    dir_parts = dirname.strip(os.path.sep).split(os.path.sep)
    rel_parts = rel_to.strip(os.path.sep).split(os.path.sep)
    # Drop the common leading components.
    while dir_parts and rel_parts and dir_parts[0] == rel_parts[0]:
        dir_parts.pop(0)
        rel_parts.pop(0)
    full_parts = ['..'] * len(rel_parts) + dir_parts + [basename]
    if full_parts == ['']:
        return '.' + os.path.sep
    return os.path.sep.join(full_parts)
|
|
4444 |
|
|
4445 |
def display_path(path):
    """Gives the display value for a given path, making it relative to cwd
    if possible."""
    path = os.path.normcase(os.path.abspath(path))
    cwd = os.getcwd()
    if path.startswith(cwd + os.path.sep):
        path = '.' + path[len(cwd):]
    return path
|
|
4452 |
|
|
4453 |
def parse_editable(editable_req, default_vcs=None):
    """Parses svn+http://blahblah@rev#egg=Foobar into a requirement
    (Foobar) and a URL"""
    url = editable_req
    if os.path.isdir(url) and os.path.exists(os.path.join(url, 'setup.py')):
        # Treating it as code that has already been checked out
        url = filename_to_url(url)
    if url.lower().startswith('file:'):
        # Local directories have no VCS requirement name to extract.
        return None, url
    for version_control in vcs:
        # Prepend the 'vcs+' marker when a bare scheme was given
        # (e.g. 'svn:...' -> 'svn+svn:...').
        if url.lower().startswith('%s:' % version_control):
            url = '%s+%s' % (version_control, url)
    if '+' not in url:
        if default_vcs:
            url = default_vcs + '+' + url
        else:
            raise InstallationError(
                '--editable=%s should be formatted with svn+URL, git+URL, hg+URL or bzr+URL' % editable_req)
    vc_type = url.split('+', 1)[0].lower()
    if not vcs.get_backend(vc_type):
        raise InstallationError(
            'For --editable=%s only svn (svn+URL), Git (git+URL), Mercurial (hg+URL) and Bazaar (bzr+URL) is currently supported' % editable_req)
    match = re.search(r'(?:#|#.*?&)egg=([^&]*)', editable_req)
    if (not match or not match.group(1)) and vcs.get_backend(vc_type):
        # No #egg= fragment: try to guess the project name from an
        # svn-style URL layout (.../tags/NAME, .../NAME/trunk).
        # NOTE(review): parts[-2] can raise IndexError for very short URLs
        # -- preserved as-is.
        parts = [p for p in editable_req.split('#', 1)[0].split('/') if p]
        if parts[-2] in ('tags', 'branches', 'tag', 'branch'):
            req = parts[-3]
        elif parts[-1] == 'trunk':
            req = parts[-2]
        else:
            raise InstallationError(
                '--editable=%s is not the right format; it must have #egg=Package'
                % editable_req)
    else:
        req = match.group(1)
    ## FIXME: use package_to_requirement?
    match = re.search(r'^(.*?)(?:-dev|-\d.*)', req)
    if match:
        # Strip off -dev, -0.2, etc.
        req = match.group(1)
    return req, url
|
|
4494 |
|
|
4495 |
def backup_dir(dir, ext='.bak'):
    """Figure out the name of a directory to back up the given dir to
    (adding .bak, .bak2, etc)"""
    n = 1
    candidate = dir + ext
    while os.path.exists(candidate):
        n += 1
        candidate = dir + ext + str(n)
    return candidate
|
|
4504 |
|
|
4505 |
def ask(message, options):
    """Ask the message interactively, with the given possible responses"""
    while 1:
        # Refuse to block on input in non-interactive runs.
        if os.environ.get('PIP_NO_INPUT'):
            raise Exception('No input was expected ($PIP_NO_INPUT set); question: %s' % message)
        response = raw_input(message)
        response = response.strip().lower()
        if response not in options:
            # Re-prompt until one of the expected responses is given.
            print 'Your response (%r) was not one of the expected responses: %s' % (
                response, ', '.join(options))
        else:
            return response
|
|
4517 |
|
|
4518 |
def open_logfile_append(filename):
    """Open the named log file in append mode.

    If the file already exists, a separator will also be printed to
    the file to separate past activity from current activity.
    """
    # Check existence before opening: open(..., 'a') creates the file.
    exists = os.path.exists(filename)
    log_fp = open(filename, 'a')
    if exists:
        print >> log_fp, '-'*60
        print >> log_fp, '%s run on %s' % (sys.argv[0], time.strftime('%c'))
    return log_fp
|
|
4530 |
|
|
4531 |
def is_url(name):
    """Return True when *name* looks like a URL with a recognised scheme
    (http/https/file/ftp or any registered VCS scheme)."""
    if ':' not in name:
        return False
    scheme, _rest = name.split(':', 1)
    known_schemes = ['http', 'https', 'file', 'ftp'] + vcs.all_schemes
    return scheme.lower() in known_schemes
|
|
4537 |
|
|
4538 |
def is_filename(name):
    """Return True when *name* plausibly refers to a local file or
    archive path rather than a bare requirement name."""
    archive_exts = ('.zip', '.tar.gz', '.tar.bz2', '.tgz', '.tar', '.pybundle')
    if splitext(name)[1].lower() in archive_exts and os.path.exists(name):
        return True
    if os.path.sep not in name and '/' not in name:
        # Doesn't have any path components, probably a requirement like 'Foo'
        return False
    return True
|
|
4546 |
|
|
4547 |
# Matches a leading Windows drive spec in a filesystem path, e.g. 'c:'.
_drive_re = re.compile('^([a-z]):', re.I)
# Matches a leading drive spec in a file: URL, where ':' may appear as '|'.
_url_drive_re = re.compile('^([a-z])[:|]', re.I)
|
|
4549 |
|
|
4550 |
def filename_to_url(filename):
    """
    Convert a path to a file: URL.  The path will be made absolute.
    """
    filename = os.path.normcase(os.path.abspath(filename))
    if _drive_re.match(filename):
        # Windows drive letter: 'c:' is written as 'c|' in this URL form.
        filename = filename[0] + '|' + filename[2:]
    quoted = urllib.quote(filename)
    quoted = quoted.replace(os.path.sep, '/').lstrip('/')
    return 'file:///' + quoted
|
|
4561 |
|
|
4562 |
def filename_to_url2(filename):
    """
    Convert a path to a file: URL.  The path will be made absolute and have
    quoted path parts.
    """
    filename = os.path.normcase(os.path.abspath(filename))
    drive, filename = os.path.splitdrive(filename)
    parts = filename.split(os.path.sep)
    url = '/'.join(urllib.quote(part) for part in parts)
    if not drive:
        url = url.lstrip('/')
    return 'file:///' + drive + url
|
|
4574 |
|
|
4575 |
def url_to_filename(url):
    """
    Convert a file: URL to a path.
    """
    assert url.startswith('file:'), (
        "You can only turn file: urls into filenames (not %r)" % url)
    filename = urllib.unquote(url[len('file:'):].lstrip('/'))
    if _url_drive_re.match(filename):
        # Restore the Windows drive separator ('c|' or 'c:' -> 'c:').
        filename = filename[0] + ':' + filename[2:]
    else:
        filename = '/' + filename
    return filename
|
|
4588 |
|
|
4589 |
def get_requirement_from_url(url):
    """Get a requirement from the URL, if possible.  This looks for #egg
    in the URL"""
    link = Link(url)
    # Fall back to the filename when there is no #egg= fragment.
    egg_info = link.egg_fragment or splitext(link.filename)[0]
    return package_to_requirement(egg_info)
|
|
4597 |
|
|
4598 |
def package_to_requirement(package_name):
    """Translate a name like Foo-1.2 to Foo==1.2"""
    # BUG FIX: the separating dash must stay outside the version group.
    # The original pattern '(-dev|-\d.*)' captured it, producing
    # 'Foo==-1.2' instead of 'Foo==1.2'.
    match = re.search(r'^(.*?)-(dev|\d.*)', package_name)
    if match:
        name = match.group(1)
        version = match.group(2)
    else:
        name = package_name
        version = ''
    if version:
        return '%s==%s' % (name, version)
    return name
|
|
4611 |
|
|
4612 |
def is_framework_layout(path):
    """Return True if the current platform is the default Python of Mac OS X
    which installs scripts in /usr/local/bin"""
    if sys.platform[:6] != 'darwin':
        return False
    return path[:9] == '/Library/' or path[:16] == '/System/Library/'
|
|
4617 |
|
|
4618 |
def strip_prefix(path, prefix):
    """If ``path`` begins with ``prefix`` (or, on MacPython framework
    layouts, with /Library or /usr/local), return the matched prefix and
    the remainder; otherwise return (None, None)."""
    candidates = [prefix]
    # Yep, we are special casing the framework layout of MacPython here
    if is_framework_layout(sys.prefix):
        for location in ('/Library', '/usr/local'):
            if path.startswith(location):
                candidates.append(location)
    for candidate in candidates:
        if path.startswith(candidate):
            return candidate, path.replace(candidate + os.path.sep, '')
    return None, None
|
|
4631 |
|
|
4632 |
class UninstallPathSet(object):
|
|
4633 |
"""A set of file paths to be removed in the uninstallation of a
|
|
4634 |
requirement."""
|
|
4635 |
    def __init__(self, dist, restrict_to_prefix):
        """Record *dist* and the prefix uninstallation is restricted to."""
        # (prefix, relative-path) pairs queued for removal.
        self.paths = set()
        # Paths outside the prefix that we refuse to touch.
        self._refuse = set()
        # Maps stripped .pth path -> UninstallPthEntries handler.
        self.pth = {}
        # Only paths under this (normalized) prefix may be uninstalled.
        self.prefix = os.path.normcase(os.path.realpath(restrict_to_prefix))
        self.dist = dist
        self.location = dist.location
        # Temp dir removed files are moved into (set by remove()).
        self.save_dir = None
        # (prefix, path) pairs actually moved, consumed by rollback().
        self._moved_paths = []
|
|
4644 |
|
|
4645 |
def _can_uninstall(self):
|
|
4646 |
prefix, stripped = strip_prefix(self.location, self.prefix)
|
|
4647 |
if not stripped:
|
|
4648 |
logger.notify("Not uninstalling %s at %s, outside environment %s"
|
|
4649 |
% (self.dist.project_name, self.dist.location,
|
|
4650 |
self.prefix))
|
|
4651 |
return False
|
|
4652 |
return True
|
|
4653 |
|
|
4654 |
def add(self, path):
|
|
4655 |
path = os.path.abspath(path)
|
|
4656 |
if not os.path.exists(path):
|
|
4657 |
return
|
|
4658 |
prefix, stripped = strip_prefix(os.path.normcase(path), self.prefix)
|
|
4659 |
if stripped:
|
|
4660 |
self.paths.add((prefix, stripped))
|
|
4661 |
else:
|
|
4662 |
self._refuse.add((prefix, path))
|
|
4663 |
|
|
4664 |
def add_pth(self, pth_file, entry):
|
|
4665 |
prefix, stripped = strip_prefix(os.path.normcase(pth_file), self.prefix)
|
|
4666 |
if stripped:
|
|
4667 |
entry = os.path.normcase(entry)
|
|
4668 |
if stripped not in self.pth:
|
|
4669 |
self.pth[stripped] = UninstallPthEntries(os.path.join(prefix, stripped))
|
|
4670 |
self.pth[stripped].add(os.path.normcase(entry))
|
|
4671 |
else:
|
|
4672 |
self._refuse.add((prefix, pth_file))
|
|
4673 |
|
|
4674 |
    def compact(self, paths):
        """Compact a path set to contain the minimal number of paths
        necessary to contain all paths in the set.  If /a/path/ and
        /a/path/to/a/file.txt are both in the set, leave only the
        shorter path."""
        short_paths = set()
        # Sort shortest-first so ancestors are considered before their
        # children.  NOTE: Python 2-style cmp function passed positionally
        # to sorted().
        def sort_set(x, y):
            prefix_x, path_x = x
            prefix_y, path_y = y
            return cmp(len(path_x), len(path_y))
        for prefix, path in sorted(paths, sort_set):
            # Keep 'path' only if no already-kept path is an ancestor
            # directory of it.
            if not any([(path.startswith(shortpath) and
                         path[len(shortpath.rstrip(os.path.sep))] == os.path.sep)
                        for shortprefix, shortpath in short_paths]):
                short_paths.add((prefix, path))
        return short_paths
|
|
4690 |
|
|
4691 |
def remove(self, auto_confirm=False):
    """Remove paths in ``self.paths`` with confirmation (unless
    ``auto_confirm`` is True).

    Files are not deleted outright: each path is moved into a fresh
    temporary save directory so that rollback() can restore it.
    """
    if not self._can_uninstall():
        return
    logger.notify('Uninstalling %s:' % self.dist.project_name)
    logger.indent += 2
    paths = sorted(self.compact(self.paths))
    try:
        if auto_confirm:
            response = 'y'
        else:
            # Show the user exactly what would be removed before asking.
            for prefix, path in paths:
                logger.notify(os.path.join(prefix, path))
            response = ask('Proceed (y/n)? ', ('y', 'n'))
        if self._refuse:
            logger.notify('Not removing or modifying (outside of prefix):')
            for prefix, path in self.compact(self._refuse):
                logger.notify(os.path.join(prefix, path))
        if response == 'y':
            # Stash removed files here so rollback() can restore them.
            self.save_dir = tempfile.mkdtemp('-uninstall', 'pip-')
            for prefix, path in paths:
                full_path = os.path.join(prefix, path)
                new_path = os.path.join(self.save_dir, path)
                logger.info('Removing file or directory %s' % full_path)
                # Record the move before performing it so a partial
                # failure can still be rolled back.
                self._moved_paths.append((prefix, path))
                # os.renames creates any intermediate directories
                # needed under save_dir.  (Removed the unused
                # 'new_dir' local that used to be computed here.)
                os.renames(full_path, new_path)
            for pth in self.pth.values():
                pth.remove()
            logger.notify('Successfully uninstalled %s' % self.dist.project_name)
    finally:
        logger.indent -= 2
|
|
4725 |
|
|
4726 |
def rollback(self):
    """Rollback the changes previously made by remove().

    Returns False (after logging an error) when there is nothing to
    roll back because remove() was never run.
    """
    if self.save_dir is None:
        logger.error("Can't roll back %s; was not uninstalled" % self.dist.project_name)
        return False
    logger.notify('Rolling back uninstall of %s' % self.dist.project_name)
    for prefix, path in self._moved_paths:
        tmp_path = os.path.join(self.save_dir, path)
        real_path = os.path.join(prefix, path)
        logger.info('Replacing %s' % real_path)
        os.renames(tmp_path, real_path)
    # BUG FIX: self.pth maps .pth file names to UninstallPthEntries
    # objects (see add_pth/remove, which use .values()).  Iterating
    # the dict directly yields the string keys, which have no
    # rollback() method and raised AttributeError here.
    for pth in self.pth.values():
        pth.rollback()
|
|
4739 |
|
|
4740 |
def commit(self):
    """Remove temporary save dir: rollback will no longer be possible."""
    if self.save_dir is None:
        # remove() never ran (or commit already happened): no-op.
        return
    shutil.rmtree(self.save_dir)
    self.save_dir = None
    self._moved_paths = []
|
|
4746 |
|
|
4747 |
|
|
4748 |
class UninstallPthEntries(object):
    """Tracks entries to remove from a single .pth file, removes them,
    and can roll the file back to its previous contents."""

    def __init__(self, pth_file):
        if not os.path.isfile(pth_file):
            raise UninstallationError("Cannot remove entries from nonexistent file %s" % pth_file)
        self.file = pth_file
        # Entries (one .pth line each, without trailing newline)
        # scheduled for removal.
        self.entries = set()
        # Original file contents, captured by remove() for rollback().
        self._saved_lines = None

    def add(self, entry):
        """Schedule *entry* for removal from the .pth file."""
        self.entries.add(entry)

    def remove(self):
        """Rewrite the .pth file with all scheduled entries removed.

        Saves the original contents first so rollback() can restore
        them.  Entries not present in the file are silently skipped.
        """
        logger.info('Removing pth entries from %s:' % self.file)
        # Close the file handle even if reading fails (the original
        # code leaked the handle on error, and its try/finally: pass
        # around the removal loop did nothing at all).
        fh = open(self.file, 'r')
        try:
            lines = fh.readlines()
        finally:
            fh.close()
        self._saved_lines = lines
        for entry in self.entries:
            logger.info('Removing entry: %s' % entry)
            try:
                lines.remove(entry + '\n')
            except ValueError:
                # Entry not in the file: nothing to remove.
                pass
        fh = open(self.file, 'w')
        try:
            fh.writelines(lines)
        finally:
            fh.close()

    def rollback(self):
        """Restore the .pth file to the contents saved by remove().

        Returns False (after logging an error) when remove() was never
        called, True on success.
        """
        if self._saved_lines is None:
            logger.error('Cannot roll back changes to %s, none were made' % self.file)
            return False
        logger.info('Rolling %s back to previous state' % self.file)
        fh = open(self.file, 'w')
        try:
            fh.writelines(self._saved_lines)
        finally:
            fh.close()
        return True
|
|
4787 |
|
|
4788 |
class FakeFile(object):
    """Wrap a list of lines in an object with readline() to make
    ConfigParser happy."""

    def __init__(self, lines):
        self._gen = (l for l in lines)

    def readline(self):
        """Return the next line, or '' once the lines are exhausted."""
        try:
            # The next() builtin works on Python 2.6+ and 3.x; the
            # previous self._gen.next() call was Python-2-only.
            return next(self._gen)
        except StopIteration:
            return ''
|
|
4799 |
|
|
4800 |
def splitext(path):
    """Like os.path.splitext, but take off .tar too"""
    base, ext = posixpath.splitext(path)
    if base.lower().endswith('.tar'):
        # Fold the '.tar' back into the extension, e.g.
        # 'pkg.tar.gz' -> ('pkg', '.tar.gz').
        return base[:-4], base[-4:] + ext
    return base, ext
|
|
4807 |
|
|
4808 |
def find_command(cmd, paths=None, pathext=None):
    """Searches the PATH for the given command and returns its path.

    cmd -- the command name to look up.
    paths -- list of directories to search (a single path string is
             also accepted); defaults to the PATH environment variable.
    pathext -- os.pathsep-separated string of executable extensions to
               try; defaults to the PATHEXT environment variable.
    Returns the full path to the command, or None if it is not found.
    """
    if paths is None:
        # BUG FIX: the default must be a string -- the old default of
        # [] crashed with AttributeError ([].split) when PATH was
        # not set in the environment.
        paths = os.environ.get('PATH', '').split(os.pathsep)
    if hasattr(paths, 'split'):
        # A single path string was passed instead of a list.  (Duck
        # typing replaces the old Python-2-only basestring check.)
        paths = [paths]
    # check if there are funny path extensions for executables, e.g. Windows
    if pathext is None:
        pathext = os.environ.get('PATHEXT', '.COM;.EXE;.BAT;.CMD')
    pathext = pathext.lower().split(os.pathsep)
    # don't use extensions if the command ends with one of them
    if os.path.splitext(cmd)[1].lower() in pathext:
        pathext = ['']
    # check if we find the command on PATH
    for path in paths:
        cmd_path = os.path.join(path, cmd)
        # try each extension first, then fall back to the bare name
        for ext in pathext:
            cmd_path_ext = cmd_path + ext
            if os.path.exists(cmd_path_ext):
                return cmd_path_ext
        if os.path.exists(cmd_path):
            return cmd_path
    return None
|
|
4833 |
|
|
4834 |
class _Inf(object):
    """I am bigger than everything!"""

    def __cmp__(self, other):
        # Equal only to itself; compares greater than anything else.
        if self is other:
            return 0
        return 1

    def __repr__(self):
        return 'Inf'

# Export a single sentinel instance and hide the class so no other
# instance can be created.
Inf = _Inf()
del _Inf
|
|
4844 |
|
|
4845 |
if __name__ == '__main__':
    # Run the command-line entry point and propagate a nonzero exit
    # status.  Renamed the local from 'exit' to 'exit_code' so it no
    # longer shadows the exit() builtin at module scope.
    exit_code = main()
    if exit_code:
        sys.exit(exit_code)
|