Changes in path of some of the files ...
authoramit@thunder
Sat, 13 Feb 2010 12:29:22 +0530
changeset 3 6cee07c589cb
parent 2 52d12eb31c30
child 4 231572ecfb59
Changes in path of some of the files ...
SEESenv/bin/activate
SEESenv/bin/activate_this.py
SEESenv/bin/easy_install
SEESenv/bin/easy_install-2.6
SEESenv/bin/pip
SEESenv/bin/python
SEESenv/include/python2.6
SEESenv/lib/python2.6/UserDict.py
SEESenv/lib/python2.6/UserDict.pyc
SEESenv/lib/python2.6/_abcoll.py
SEESenv/lib/python2.6/_abcoll.pyc
SEESenv/lib/python2.6/abc.py
SEESenv/lib/python2.6/abc.pyc
SEESenv/lib/python2.6/codecs.py
SEESenv/lib/python2.6/codecs.pyc
SEESenv/lib/python2.6/config
SEESenv/lib/python2.6/copy_reg.py
SEESenv/lib/python2.6/copy_reg.pyc
SEESenv/lib/python2.6/distutils/__init__.py
SEESenv/lib/python2.6/distutils/__init__.pyc
SEESenv/lib/python2.6/distutils/distutils.cfg
SEESenv/lib/python2.6/encodings
SEESenv/lib/python2.6/fnmatch.py
SEESenv/lib/python2.6/fnmatch.pyc
SEESenv/lib/python2.6/genericpath.py
SEESenv/lib/python2.6/genericpath.pyc
SEESenv/lib/python2.6/lib-dynload
SEESenv/lib/python2.6/linecache.py
SEESenv/lib/python2.6/linecache.pyc
SEESenv/lib/python2.6/locale.py
SEESenv/lib/python2.6/locale.pyc
SEESenv/lib/python2.6/no-global-site-packages.txt
SEESenv/lib/python2.6/ntpath.py
SEESenv/lib/python2.6/ntpath.pyc
SEESenv/lib/python2.6/orig-prefix.txt
SEESenv/lib/python2.6/os.py
SEESenv/lib/python2.6/os.pyc
SEESenv/lib/python2.6/posixpath.py
SEESenv/lib/python2.6/posixpath.pyc
SEESenv/lib/python2.6/re.py
SEESenv/lib/python2.6/re.pyc
SEESenv/lib/python2.6/site-packages/easy-install.pth
SEESenv/lib/python2.6/site-packages/pip-0.6.1-py2.6.egg/EGG-INFO/PKG-INFO
SEESenv/lib/python2.6/site-packages/pip-0.6.1-py2.6.egg/EGG-INFO/SOURCES.txt
SEESenv/lib/python2.6/site-packages/pip-0.6.1-py2.6.egg/EGG-INFO/dependency_links.txt
SEESenv/lib/python2.6/site-packages/pip-0.6.1-py2.6.egg/EGG-INFO/not-zip-safe
SEESenv/lib/python2.6/site-packages/pip-0.6.1-py2.6.egg/EGG-INFO/scripts/pip
SEESenv/lib/python2.6/site-packages/pip-0.6.1-py2.6.egg/EGG-INFO/top_level.txt
SEESenv/lib/python2.6/site-packages/pip-0.6.1-py2.6.egg/pip.py
SEESenv/lib/python2.6/site-packages/pip-0.6.1-py2.6.egg/pip.pyc
SEESenv/lib/python2.6/site-packages/setuptools-0.6c11-py2.6.egg
SEESenv/lib/python2.6/site-packages/setuptools.pth
SEESenv/lib/python2.6/site.py
SEESenv/lib/python2.6/site.pyc
SEESenv/lib/python2.6/sre.py
SEESenv/lib/python2.6/sre.pyc
SEESenv/lib/python2.6/sre_compile.py
SEESenv/lib/python2.6/sre_compile.pyc
SEESenv/lib/python2.6/sre_constants.py
SEESenv/lib/python2.6/sre_constants.pyc
SEESenv/lib/python2.6/sre_parse.py
SEESenv/lib/python2.6/sre_parse.pyc
SEESenv/lib/python2.6/stat.py
SEESenv/lib/python2.6/stat.pyc
SEESenv/lib/python2.6/types.py
SEESenv/lib/python2.6/types.pyc
SEESenv/lib/python2.6/warnings.py
SEESenv/lib/python2.6/warnings.pyc
SEESenv/web/hgbook/comments/views.py
SEESenv/web/hgbook/comments/views.py~
SEESenv/web/hgbook/run.wsgi
SEESenv/web/hgbook/run.wsgi~
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/SEESenv/bin/activate	Sat Feb 13 12:29:22 2010 +0530
@@ -0,0 +1,56 @@
+# This file must be used with "source bin/activate" *from bash*
+# you cannot run it directly
+
+deactivate () {
+    if [ -n "$_OLD_VIRTUAL_PATH" ] ; then
+        PATH="$_OLD_VIRTUAL_PATH"
+        export PATH
+        unset _OLD_VIRTUAL_PATH
+    fi
+
+    # This should detect bash and zsh, which have a hash command that must
+    # be called to get it to forget past commands.  Without forgetting
+    # past commands the $PATH changes we made may not be respected
+    if [ -n "$BASH" -o -n "$ZSH_VERSION" ] ; then
+        hash -r
+    fi
+
+    if [ -n "$_OLD_VIRTUAL_PS1" ] ; then
+        PS1="$_OLD_VIRTUAL_PS1"
+        export PS1
+        unset _OLD_VIRTUAL_PS1
+    fi
+
+    unset VIRTUAL_ENV
+    if [ ! "$1" = "nondestructive" ] ; then
+    # Self destruct!
+        unset -f deactivate
+    fi
+}
+
+# unset irrelavent variables
+deactivate nondestructive
+
+VIRTUAL_ENV="/home/amit/SEES-hacks/SEESenv"
+export VIRTUAL_ENV
+
+_OLD_VIRTUAL_PATH="$PATH"
+PATH="$VIRTUAL_ENV/bin:$PATH"
+export PATH
+
+_OLD_VIRTUAL_PS1="$PS1"
+if [ "`basename \"$VIRTUAL_ENV\"`" = "__" ] ; then
+    # special case for Aspen magic directories
+    # see http://www.zetadev.com/software/aspen/
+    PS1="[`basename \`dirname \"$VIRTUAL_ENV\"\``] $PS1"
+else
+    PS1="(`basename \"$VIRTUAL_ENV\"`)$PS1"
+fi
+export PS1
+
+# This should detect bash and zsh, which have a hash command that must
+# be called to get it to forget past commands.  Without forgetting
+# past commands the $PATH changes we made may not be respected
+if [ -n "$BASH" -o -n "$ZSH_VERSION" ] ; then
+    hash -r
+fi
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/SEESenv/bin/activate_this.py	Sat Feb 13 12:29:22 2010 +0530
@@ -0,0 +1,29 @@
+"""By using execfile(this_file, dict(__file__=this_file)) you will
+activate this virtualenv environment.
+
+This can be used when you must use an existing Python interpreter, not
+the virtualenv bin/python
+"""
+
+try:
+    __file__
+except NameError:
+    raise AssertionError(
+        "You must run this like execfile('path/to/active_this.py', dict(__file__='path/to/activate_this.py'))")
+import sys
+import os
+
+base = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
+site_packages = os.path.join(base, 'lib', 'python%s' % sys.version[:3], 'site-packages')
+prev_sys_path = list(sys.path)
+import site
+site.addsitedir(site_packages)
+sys.real_prefix = sys.prefix
+sys.prefix = base
+# Move the added items to the front of the path:
+new_sys_path = []
+for item in list(sys.path):
+    if item not in prev_sys_path:
+        new_sys_path.append(item)
+        sys.path.remove(item)
+sys.path[:0] = new_sys_path
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/SEESenv/bin/easy_install	Sat Feb 13 12:29:22 2010 +0530
@@ -0,0 +1,9 @@
+#!/home/amit/SEES-hacks/SEESenv/bin/python
+# EASY-INSTALL-ENTRY-SCRIPT: 'setuptools==0.6c11','console_scripts','easy_install'
+__requires__ = 'setuptools==0.6c11'
+import sys
+from pkg_resources import load_entry_point
+
+sys.exit(
+   load_entry_point('setuptools==0.6c11', 'console_scripts', 'easy_install')()
+)
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/SEESenv/bin/easy_install-2.6	Sat Feb 13 12:29:22 2010 +0530
@@ -0,0 +1,9 @@
+#!/home/amit/SEES-hacks/SEESenv/bin/python
+# EASY-INSTALL-ENTRY-SCRIPT: 'setuptools==0.6c11','console_scripts','easy_install-2.6'
+__requires__ = 'setuptools==0.6c11'
+import sys
+from pkg_resources import load_entry_point
+
+sys.exit(
+   load_entry_point('setuptools==0.6c11', 'console_scripts', 'easy_install-2.6')()
+)
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/SEESenv/bin/pip	Sat Feb 13 12:29:22 2010 +0530
@@ -0,0 +1,5 @@
+#!/home/amit/SEES-hacks/SEESenv/bin/python
+# EASY-INSTALL-SCRIPT: 'pip==0.6.1','pip'
+__requires__ = 'pip==0.6.1'
+import pkg_resources
+pkg_resources.run_script('pip==0.6.1', 'pip')
Binary file SEESenv/bin/python has changed
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/SEESenv/include/python2.6	Sat Feb 13 12:29:22 2010 +0530
@@ -0,0 +1,1 @@
+/usr/include/python2.6
\ No newline at end of file
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/SEESenv/lib/python2.6/UserDict.py	Sat Feb 13 12:29:22 2010 +0530
@@ -0,0 +1,1 @@
+/usr/lib/python2.6/UserDict.py
\ No newline at end of file
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/SEESenv/lib/python2.6/UserDict.pyc	Sat Feb 13 12:29:22 2010 +0530
@@ -0,0 +1,1 @@
+/usr/lib/python2.6/UserDict.pyc
\ No newline at end of file
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/SEESenv/lib/python2.6/_abcoll.py	Sat Feb 13 12:29:22 2010 +0530
@@ -0,0 +1,1 @@
+/usr/lib/python2.6/_abcoll.py
\ No newline at end of file
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/SEESenv/lib/python2.6/_abcoll.pyc	Sat Feb 13 12:29:22 2010 +0530
@@ -0,0 +1,1 @@
+/usr/lib/python2.6/_abcoll.pyc
\ No newline at end of file
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/SEESenv/lib/python2.6/abc.py	Sat Feb 13 12:29:22 2010 +0530
@@ -0,0 +1,1 @@
+/usr/lib/python2.6/abc.py
\ No newline at end of file
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/SEESenv/lib/python2.6/abc.pyc	Sat Feb 13 12:29:22 2010 +0530
@@ -0,0 +1,1 @@
+/usr/lib/python2.6/abc.pyc
\ No newline at end of file
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/SEESenv/lib/python2.6/codecs.py	Sat Feb 13 12:29:22 2010 +0530
@@ -0,0 +1,1 @@
+/usr/lib/python2.6/codecs.py
\ No newline at end of file
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/SEESenv/lib/python2.6/codecs.pyc	Sat Feb 13 12:29:22 2010 +0530
@@ -0,0 +1,1 @@
+/usr/lib/python2.6/codecs.pyc
\ No newline at end of file
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/SEESenv/lib/python2.6/config	Sat Feb 13 12:29:22 2010 +0530
@@ -0,0 +1,1 @@
+/usr/lib/python2.6/config
\ No newline at end of file
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/SEESenv/lib/python2.6/copy_reg.py	Sat Feb 13 12:29:22 2010 +0530
@@ -0,0 +1,1 @@
+/usr/lib/python2.6/copy_reg.py
\ No newline at end of file
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/SEESenv/lib/python2.6/copy_reg.pyc	Sat Feb 13 12:29:22 2010 +0530
@@ -0,0 +1,1 @@
+/usr/lib/python2.6/copy_reg.pyc
\ No newline at end of file
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/SEESenv/lib/python2.6/distutils/__init__.py	Sat Feb 13 12:29:22 2010 +0530
@@ -0,0 +1,87 @@
+import os
+import sys
+import warnings 
+import ConfigParser # ConfigParser is not a virtualenv module, so we can use it to find the stdlib
+
+dirname = os.path.dirname
+
+distutils_path = os.path.join(os.path.dirname(ConfigParser.__file__), 'distutils')
+if os.path.normpath(distutils_path) == os.path.dirname(os.path.normpath(__file__)):
+    warnings.warn(
+        "The virtualenv distutils package at %s appears to be in the same location as the system distutils?")
+else:
+    __path__.insert(0, distutils_path)
+    exec open(os.path.join(distutils_path, '__init__.py')).read()
+
+import dist
+import sysconfig
+
+
+## patch build_ext (distutils doesn't know how to get the libs directory
+## path on windows - it hardcodes the paths around the patched sys.prefix)
+
+if sys.platform == 'win32':
+    from distutils.command.build_ext import build_ext as old_build_ext
+    class build_ext(old_build_ext):
+        def finalize_options (self):
+            if self.library_dirs is None:
+                self.library_dirs = []
+            
+            self.library_dirs.insert(0, os.path.join(sys.real_prefix, "Libs"))
+            old_build_ext.finalize_options(self)
+            
+    from distutils.command import build_ext as build_ext_module 
+    build_ext_module.build_ext = build_ext
+
+## distutils.dist patches:
+
+old_find_config_files = dist.Distribution.find_config_files
+def find_config_files(self):
+    found = old_find_config_files(self)
+    system_distutils = os.path.join(distutils_path, 'distutils.cfg')
+    #if os.path.exists(system_distutils):
+    #    found.insert(0, system_distutils)
+        # What to call the per-user config file
+    if os.name == 'posix':
+        user_filename = ".pydistutils.cfg"
+    else:
+        user_filename = "pydistutils.cfg"
+    user_filename = os.path.join(sys.prefix, user_filename)
+    if os.path.isfile(user_filename):
+        for item in list(found):
+            if item.endswith('pydistutils.cfg'):
+                found.remove(item)
+        found.append(user_filename)
+    return found
+dist.Distribution.find_config_files = find_config_files
+
+## distutils.sysconfig patches:
+
+old_get_python_inc = sysconfig.get_python_inc
+def sysconfig_get_python_inc(plat_specific=0, prefix=None):
+    if prefix is None:
+        prefix = sys.real_prefix
+    return old_get_python_inc(plat_specific, prefix)
+sysconfig_get_python_inc.__doc__ = old_get_python_inc.__doc__
+sysconfig.get_python_inc = sysconfig_get_python_inc
+
+old_get_python_lib = sysconfig.get_python_lib
+def sysconfig_get_python_lib(plat_specific=0, standard_lib=0, prefix=None):
+    if standard_lib and prefix is None:
+        prefix = sys.real_prefix
+    return old_get_python_lib(plat_specific, standard_lib, prefix)
+sysconfig_get_python_lib.__doc__ = old_get_python_lib.__doc__
+sysconfig.get_python_lib = sysconfig_get_python_lib
+
+old_get_config_vars = sysconfig.get_config_vars
+def sysconfig_get_config_vars(*args):
+    real_vars = old_get_config_vars(*args)
+    if sys.platform == 'win32':
+        lib_dir = os.path.join(sys.real_prefix, "libs")
+        if isinstance(real_vars, dict) and 'LIBDIR' not in real_vars:
+            real_vars['LIBDIR'] = lib_dir # asked for all
+        elif isinstance(real_vars, list) and 'LIBDIR' in args:
+            real_vars = real_vars + [lib_dir] # asked for list
+    return real_vars
+sysconfig_get_config_vars.__doc__ = old_get_config_vars.__doc__
+sysconfig.get_config_vars = sysconfig_get_config_vars
Binary file SEESenv/lib/python2.6/distutils/__init__.pyc has changed
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/SEESenv/lib/python2.6/distutils/distutils.cfg	Sat Feb 13 12:29:22 2010 +0530
@@ -0,0 +1,6 @@
+# This is a config file local to this virtualenv installation
+# You may include options that will be used by all distutils commands,
+# and by easy_install.  For instance:
+#
+#   [easy_install]
+#   find_links = http://mylocalsite
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/SEESenv/lib/python2.6/encodings	Sat Feb 13 12:29:22 2010 +0530
@@ -0,0 +1,1 @@
+/usr/lib/python2.6/encodings
\ No newline at end of file
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/SEESenv/lib/python2.6/fnmatch.py	Sat Feb 13 12:29:22 2010 +0530
@@ -0,0 +1,1 @@
+/usr/lib/python2.6/fnmatch.py
\ No newline at end of file
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/SEESenv/lib/python2.6/fnmatch.pyc	Sat Feb 13 12:29:22 2010 +0530
@@ -0,0 +1,1 @@
+/usr/lib/python2.6/fnmatch.pyc
\ No newline at end of file
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/SEESenv/lib/python2.6/genericpath.py	Sat Feb 13 12:29:22 2010 +0530
@@ -0,0 +1,1 @@
+/usr/lib/python2.6/genericpath.py
\ No newline at end of file
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/SEESenv/lib/python2.6/genericpath.pyc	Sat Feb 13 12:29:22 2010 +0530
@@ -0,0 +1,1 @@
+/usr/lib/python2.6/genericpath.pyc
\ No newline at end of file
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/SEESenv/lib/python2.6/lib-dynload	Sat Feb 13 12:29:22 2010 +0530
@@ -0,0 +1,1 @@
+/usr/lib/python2.6/lib-dynload
\ No newline at end of file
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/SEESenv/lib/python2.6/linecache.py	Sat Feb 13 12:29:22 2010 +0530
@@ -0,0 +1,1 @@
+/usr/lib/python2.6/linecache.py
\ No newline at end of file
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/SEESenv/lib/python2.6/linecache.pyc	Sat Feb 13 12:29:22 2010 +0530
@@ -0,0 +1,1 @@
+/usr/lib/python2.6/linecache.pyc
\ No newline at end of file
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/SEESenv/lib/python2.6/locale.py	Sat Feb 13 12:29:22 2010 +0530
@@ -0,0 +1,1 @@
+/usr/lib/python2.6/locale.py
\ No newline at end of file
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/SEESenv/lib/python2.6/locale.pyc	Sat Feb 13 12:29:22 2010 +0530
@@ -0,0 +1,1 @@
+/usr/lib/python2.6/locale.pyc
\ No newline at end of file
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/SEESenv/lib/python2.6/ntpath.py	Sat Feb 13 12:29:22 2010 +0530
@@ -0,0 +1,1 @@
+/usr/lib/python2.6/ntpath.py
\ No newline at end of file
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/SEESenv/lib/python2.6/ntpath.pyc	Sat Feb 13 12:29:22 2010 +0530
@@ -0,0 +1,1 @@
+/usr/lib/python2.6/ntpath.pyc
\ No newline at end of file
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/SEESenv/lib/python2.6/orig-prefix.txt	Sat Feb 13 12:29:22 2010 +0530
@@ -0,0 +1,1 @@
+/usr
\ No newline at end of file
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/SEESenv/lib/python2.6/os.py	Sat Feb 13 12:29:22 2010 +0530
@@ -0,0 +1,1 @@
+/usr/lib/python2.6/os.py
\ No newline at end of file
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/SEESenv/lib/python2.6/os.pyc	Sat Feb 13 12:29:22 2010 +0530
@@ -0,0 +1,1 @@
+/usr/lib/python2.6/os.pyc
\ No newline at end of file
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/SEESenv/lib/python2.6/posixpath.py	Sat Feb 13 12:29:22 2010 +0530
@@ -0,0 +1,1 @@
+/usr/lib/python2.6/posixpath.py
\ No newline at end of file
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/SEESenv/lib/python2.6/posixpath.pyc	Sat Feb 13 12:29:22 2010 +0530
@@ -0,0 +1,1 @@
+/usr/lib/python2.6/posixpath.pyc
\ No newline at end of file
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/SEESenv/lib/python2.6/re.py	Sat Feb 13 12:29:22 2010 +0530
@@ -0,0 +1,1 @@
+/usr/lib/python2.6/re.py
\ No newline at end of file
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/SEESenv/lib/python2.6/re.pyc	Sat Feb 13 12:29:22 2010 +0530
@@ -0,0 +1,1 @@
+/usr/lib/python2.6/re.pyc
\ No newline at end of file
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/SEESenv/lib/python2.6/site-packages/easy-install.pth	Sat Feb 13 12:29:22 2010 +0530
@@ -0,0 +1,4 @@
+import sys; sys.__plen = len(sys.path)
+./setuptools-0.6c11-py2.6.egg
+./pip-0.6.1-py2.6.egg
+import sys; new=sys.path[sys.__plen:]; del sys.path[sys.__plen:]; p=getattr(sys,'__egginsert',0); sys.path[p:p]=new; sys.__egginsert = p+len(new)
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/SEESenv/lib/python2.6/site-packages/pip-0.6.1-py2.6.egg/EGG-INFO/PKG-INFO	Sat Feb 13 12:29:22 2010 +0530
@@ -0,0 +1,278 @@
+Metadata-Version: 1.0
+Name: pip
+Version: 0.6.1
+Summary: pip installs packages.  Python packages.  An easy_install replacement
+Home-page: http://pip.openplans.org
+Author: The Open Planning Project
+Author-email: python-virtualenv@groups.google.com
+License: MIT
+Description: \
+        The main website for pip is `pip.openplans.org
+        <http://pip.openplans.org>`_.  You can also install
+        the `in-development version <http://bitbucket.org/ianb/pip/get/tip.gz#egg=pip-dev>`_
+        of pip with ``easy_install pip==dev``.
+        
+        
+        Introduction
+        ------------
+        
+        pip is a replacement for `easy_install
+        <http://peak.telecommunity.com/DevCenter/EasyInstall>`_.  It uses mostly the
+        same techniques for finding packages, so packages that were made
+        easy_installable should be pip-installable as well.
+        
+        pip is meant to improve on easy_install.  Some of the improvements:
+        
+        * All packages are downloaded before installation.  Partially-completed
+        installation doesn't occur as a result.
+        
+        * Care is taken to present useful output on the console.
+        
+        * The reasons for actions are kept track of.  For instance, if a package is
+        being installed, pip keeps track of why that package was required.
+        
+        * Error messages should be useful.
+        
+        * The code is relatively concise and cohesive, making it easier to use
+        programmatically.
+        
+        * Packages don't have to be installed as egg archives, they can be installed
+        flat (while keeping the egg metadata).
+        
+        * Native support for other version control systems (Git, Mercurial and Bazaar)
+        
+        * Uninstallation of packages.
+        
+        * Simple to define fixed sets of requirements and reliably reproduce a
+        set of packages.
+        
+        pip is complementary with `virtualenv
+        <http://pypi.python.org/pypi/virtualenv>`_, and it is encouraged that you use
+        virtualenv to isolate your installation.
+        
+        Community
+        ---------
+        
+        The homepage for pip is temporarily located `on PyPI
+        <http://pypi.python.org/pypi/pip>`_ -- a more proper homepage will
+        follow.  Bugs can go on the `pip issue tracker
+        <http://bitbucket.org/ianb/pip/issues/>`_.  Discussion should happen on the
+        `virtualenv email group
+        <http://groups.google.com/group/python-virtualenv?hl=en>`_.
+        
+        Differences From easy_install
+        -----------------------------
+        
+        pip cannot install some packages.  Specifically:
+        
+        * It cannot install from eggs.  It only installs from source.  (In the future it would be good if it could install binaries from Windows ``.exe`` or ``.msi`` -- binary install on other platforms is not a priority.)
+        
+        * It doesn't understand Setuptools extras (like ``package[test]``).  This should
+        be added eventually.
+        
+        * It is incompatible with some packages that customize distutils or setuptools
+        in their ``setup.py`` files.
+        
+        * Maybe it doesn't work on Windows.  At least, the author doesn't test on
+        Windows often.
+        
+        * It also has some extra features.  Extra features the author thinks are great.
+        
+        Uninstall
+        ---------
+        
+        pip is able to uninstall most installed packages with ``pip uninstall
+        package-name``.
+        
+        Known exceptions include pure-distutils packages installed with
+        ``python setup.py install`` (such packages leave behind no metadata allowing
+        determination of what files were installed), and script wrappers installed
+        by develop-installs (``python setup.py develop``).
+        
+        pip also performs an automatic uninstall of an old version of a package
+        before upgrading to a newer version, so outdated files (and egg-info data)
+        from conflicting versions aren't left hanging around to cause trouble. The
+        old version of the package is automatically restored if the new version
+        fails to download or install.
+        
+        .. _`requirements file`:
+        
+        Requirements Files
+        ------------------
+        
+        When installing software, and Python packages in particular, it's common that
+        you get a lot of libraries installed.  You just did ``easy_install MyPackage``
+        and you get a dozen packages.  Each of these packages has its own version.
+        
+        Maybe you ran that installation and it works.  Great!  Will it keep working?
+        Did you have to provide special options to get it to find everything?  Did you
+        have to install a bunch of other optional pieces?  Most of all, will you be able
+        to do it again?
+        
+        If you've ever tried to setup an application on a new system, or with slightly
+        updated pieces, and had it fail, pip requirements are for you.  If you
+        haven't had this problem then you will eventually, so pip requirements are
+        for you too -- requirements make explicit, repeatable installation of packages.
+        
+        So what are requirements files?  They are very simple: lists of packages to
+        install.  Instead of running something like ``pip MyApp`` and getting
+        whatever libraries come along, you can create a requirements file something like::
+        
+        MyApp
+        Framework==0.9.4
+        Library>=0.2
+        
+        Then, regardless of what MyApp lists in ``setup.py``, you'll get a specific
+        version of Framework and at least the 0.2 version of Library.  (You might think
+        you could list these specific versions in ``setup.py`` -- try it and you'll
+        quickly see why that doesn't work.)  You can add optional libraries and support
+        tools that MyApp doesn't strictly require.
+        
+        You can also include "editable" packages -- packages that are checked out from
+        Subversion, Git, Mercurial and Bazaar.  These are just like using the ``-e``
+        option to pip.  They look like::
+        
+        -e svn+http://myrepo/svn/MyApp#egg=MyApp
+        
+        You have to start the URL with ``svn+`` (``git+``, ``hg+`` or ``bzr+``), and
+        you have to include ``#egg=Package`` so pip knows what to expect at that URL.
+        You can also include ``@rev`` in the URL, e.g., ``@275`` to check out
+        revision 275.
+        
+        Freezing Requirements
+        ---------------------
+        
+        So you have a working set of packages, and you want to be able to install them
+        elsewhere.  `Requirements files`_ let you install exact versions, but it won't
+        tell you what all the exact versions are.
+        
+        To create a new requirements file from a known working environment, use::
+        
+        $ pip freeze > stable-req.txt
+        
+        This will write a listing of *all* installed libraries to ``stable-req.txt``
+        with exact versions for every library.  You may want to edit the file down after
+        generating (e.g., to eliminate unnecessary libraries), but it'll give you a
+        stable starting point for constructing your requirements file.
+        
+        You can also give it an existing requirements file, and it will use that as a
+        sort of template for the new file.  So if you do::
+        
+        $ pip freeze -r devel-req.txt > stable-req.txt
+        
+        it will keep the packages listed in ``devel-req.txt`` in order and preserve
+        comments.
+        
+        Bundles
+        -------
+        
+        Another way to distribute a set of libraries is a bundle format (specific to
+        pip).  This format is not stable at this time (there simply hasn't been
+        any feedback, nor a great deal of thought).  A bundle file contains all the
+        source for your package, and you can have pip install them all together.
+        Once you have the bundle file further network access won't be necessary.  To
+        build a bundle file, do::
+        
+        $ pip bundle MyApp.pybundle MyApp
+        
+        (Using a `requirements file`_ would be wise.)  Then someone else can get the
+        file ``MyApp.pybundle`` and run::
+        
+        $ pip install MyApp.pybundle
+        
+        This is *not* a binary format.  This only packages source.  If you have binary
+        packages, then the person who installs the files will have to have a compiler,
+        any necessary headers installed, etc.  Binary packages are hard, this is
+        relatively easy.
+        
+        Using pip with virtualenv
+        -------------------------
+        
+        pip is most nutritious when used with `virtualenv
+        <http://pypi.python.org/pypi/virtualenv>`_.  One of the reasons pip
+        doesn't install "multi-version" eggs is that virtualenv removes much of the need
+        for it.
+        
+        pip does not have to be installed to use it, you can run ``python
+        path/to/pip.py`` and it will work.  This is intended to avoid the
+        bootstrapping problem of installation.  You can also run pip inside
+        any virtualenv environment, like::
+        
+        $ virtualenv new-env/
+        ... creates new-env/ ...
+        $ pip install -E new-env/ MyPackage
+        
+        This is exactly equivalent to::
+        
+        $ ./new-env/bin/python path/to/pip.py install MyPackage
+        
+        Except, if you have ``virtualenv`` installed and the path ``new-env/``
+        doesn't exist, then a new virtualenv will be created.
+        
+        pip also has two advanced features for working with virtualenvs -- both of
+        which activated by defining a variable in your environment.
+        
+        To tell pip to only run if there is a virtualenv currently activated,
+        and to bail if not, use::
+        
+        export PIP_REQUIRE_VIRTUALENV=true
+        
+        To tell pip to automatically use the currently active virtualenv::
+        
+        export PIP_RESPECT_VIRTUALENV=true
+        
+        Providing an environment with ``-E`` will be ignored.
+        
+        Using pip with virtualenvwrapper
+        ---------------------------------
+        
+        If you are using `virtualenvwrapper
+        <http://www.doughellmann.com/projects/virtualenvwrapper/>`_, you might
+        want pip to automatically create its virtualenvs in your
+        ``$WORKON_HOME``.
+        
+        You can tell pip to do so by defining ``PIP_VIRTUALENV_BASE`` in your
+        environment and setting it to the same value as that of
+        ``$WORKON_HOME``.
+        
+        Do so by adding the line::
+        
+        export PIP_VIRTUALENV_BASE=$WORKON_HOME
+        
+        in your .bashrc under the line starting with ``export WORKON_HOME``.
+        
+        Using pip with buildout
+        -----------------------
+        
+        If you are using `zc.buildout
+        <http://pypi.python.org/pypi/zc.buildout>`_ you should look at
+        `gp.recipe.pip <http://pypi.python.org/pypi/gp.recipe.pip>`_ as an
+        option to use pip and virtualenv in your buildouts.
+        
+        Command line completion
+        -----------------------
+        
+        pip comes with support for command line completion in bash and zsh and
+        allows you tab complete commands and options. To enable it you simply
+        need copy the required shell script to the your shell startup file
+        (e.g. ``.profile`` or ``.zprofile``) by running the special ``completion``
+        command, e.g. for bash::
+        
+        $ pip completion --bash >> ~/.profile
+        
+        And for zsh::
+        
+        $ pip completion --zsh >> ~/.zprofile
+        
+        Alternatively, you can use the result of the ``completion`` command
+        directly with the eval function of you shell, e.g. by adding::
+        
+        eval `pip completion --bash`
+        
+        to your startup file.
+Keywords: easy_install distutils setuptools egg virtualenv
+Platform: UNKNOWN
+Classifier: Development Status :: 4 - Beta
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: MIT License
+Classifier: Topic :: Software Development :: Build Tools
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/SEESenv/lib/python2.6/site-packages/pip-0.6.1-py2.6.egg/EGG-INFO/SOURCES.txt	Sat Feb 13 12:29:22 2010 +0530
@@ -0,0 +1,14 @@
+MANIFEST.in
+pip.py
+setup.cfg
+setup.py
+docs/configuration.txt
+docs/index.txt
+docs/license.txt
+docs/news.txt
+docs/requirement-format.txt
+pip.egg-info/PKG-INFO
+pip.egg-info/SOURCES.txt
+pip.egg-info/dependency_links.txt
+pip.egg-info/top_level.txt
+scripts/pip
\ No newline at end of file
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/SEESenv/lib/python2.6/site-packages/pip-0.6.1-py2.6.egg/EGG-INFO/dependency_links.txt	Sat Feb 13 12:29:22 2010 +0530
@@ -0,0 +1,1 @@
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/SEESenv/lib/python2.6/site-packages/pip-0.6.1-py2.6.egg/EGG-INFO/not-zip-safe	Sat Feb 13 12:29:22 2010 +0530
@@ -0,0 +1,1 @@
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/SEESenv/lib/python2.6/site-packages/pip-0.6.1-py2.6.egg/EGG-INFO/scripts/pip	Sat Feb 13 12:29:22 2010 +0530
@@ -0,0 +1,3 @@
+#!/home/amit/SEES-hacks/SEESenv/bin/python
+import pip
+pip.main()
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/SEESenv/lib/python2.6/site-packages/pip-0.6.1-py2.6.egg/EGG-INFO/top_level.txt	Sat Feb 13 12:29:22 2010 +0530
@@ -0,0 +1,1 @@
+pip
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/SEESenv/lib/python2.6/site-packages/pip-0.6.1-py2.6.egg/pip.py	Sat Feb 13 12:29:22 2010 +0530
@@ -0,0 +1,4848 @@
+#!/usr/bin/env python
+import sys
+import os
+import errno
+import stat
+import optparse
+import pkg_resources
+import urllib2
+import urllib
+import mimetypes
+import zipfile
+import tarfile
+import tempfile
+import subprocess
+import posixpath
+import re
+import shutil
+import fnmatch
+import operator
+import copy
+try:
+    from hashlib import md5
+except ImportError:
+    import md5 as md5_module
+    md5 = md5_module.new
+import urlparse
+from email.FeedParser import FeedParser
+import traceback
+from cStringIO import StringIO
+import socket
+from Queue import Queue
+from Queue import Empty as QueueEmpty
+import threading
+import httplib
+import time
+import logging
+import ConfigParser
+from distutils.util import strtobool
+from distutils import sysconfig
+
+class InstallationError(Exception):
+    """General exception during installation"""
+
+class UninstallationError(Exception):
+    """General exception during uninstallation"""
+
+class DistributionNotFound(InstallationError):
+    """Raised when a distribution cannot be found to satisfy a requirement"""
+
+class BadCommand(Exception):
+    """Raised when virtualenv or a command is not found"""
+
+try:
+    any
+except NameError:
+    def any(seq):
+        for item in seq:
+            if item:
+                return True
+        return False
+
+if getattr(sys, 'real_prefix', None):
+    ## FIXME: is build/ a good name?
+    build_prefix = os.path.join(sys.prefix, 'build')
+    src_prefix = os.path.join(sys.prefix, 'src')
+else:
+    ## FIXME: this isn't a very good default
+    build_prefix = os.path.join(os.getcwd(), 'build')
+    src_prefix = os.path.join(os.getcwd(), 'src')
+
+# FIXME doesn't account for venv linked to global site-packages
+
+site_packages = sysconfig.get_python_lib()
+user_dir = os.path.expanduser('~')
+if sys.platform == 'win32':
+    bin_py = os.path.join(sys.prefix, 'Scripts')
+    # buildout uses 'bin' on Windows too?
+    if not os.path.exists(bin_py):
+        bin_py = os.path.join(sys.prefix, 'bin')
+    config_dir = os.environ.get('APPDATA', user_dir) # Use %APPDATA% for roaming
+    default_config_file = os.path.join(config_dir, 'pip', 'pip.ini')
+else:
+    bin_py = os.path.join(sys.prefix, 'bin')
+    default_config_file = os.path.join(user_dir, '.pip', 'pip.conf')
+    # Forcing to use /usr/local/bin for standard Mac OS X framework installs
+    if sys.platform[:6] == 'darwin' and sys.prefix[:16] == '/System/Library/':
+        bin_py = '/usr/local/bin'
+
+class UpdatingDefaultsHelpFormatter(optparse.IndentedHelpFormatter):
+    """Custom help formatter for use in ConfigOptionParser that updates
+    the defaults before expanding them, allowing them to show up correctly
+    in the help listing"""
+
+    def expand_default(self, option):
+        if self.parser is not None:
+            self.parser.update_defaults(self.parser.defaults)
+        return optparse.IndentedHelpFormatter.expand_default(self, option)
+
+
+class ConfigOptionParser(optparse.OptionParser):
+    """Custom option parser which updates its defaults by checking the
+    configuration files and environmental variables"""
+
+    def __init__(self, *args, **kwargs):
+        self.config = ConfigParser.RawConfigParser()
+        self.name = kwargs.pop('name')
+        self.files = self.get_config_files()
+        self.config.read(self.files)
+        assert self.name
+        optparse.OptionParser.__init__(self, *args, **kwargs)
+
+    def get_config_files(self):
+        config_file = os.environ.get('PIP_CONFIG_FILE', False)
+        if config_file and os.path.exists(config_file):
+            return [config_file]
+        return [default_config_file]
+
+    def update_defaults(self, defaults):
+        """Updates the given defaults with values from the config files and
+        the environ. Does a little special handling for certain types of
+        options (lists)."""
+        # Then go and look for the other sources of configuration:
+        config = {}
+        # 1. config files
+        for section in ('global', self.name):
+            config.update(dict(self.get_config_section(section)))
+        # 2. environmental variables
+        config.update(dict(self.get_environ_vars()))
+        # Then set the options with those values
+        for key, val in config.iteritems():
+            key = key.replace('_', '-')
+            if not key.startswith('--'):
+                key = '--%s' % key # only prefer long opts
+            option = self.get_option(key)
+            if option is not None:
+                # ignore empty values
+                if not val:
+                    continue
+                # handle multiline configs
+                if option.action == 'append':
+                    val = val.split()
+                else:
+                    option.nargs = 1
+                if option.action in ('store_true', 'store_false', 'count'):
+                    val = strtobool(val)
+                try:
+                    val = option.convert_value(key, val)
+                except optparse.OptionValueError, e:
+                    print ("An error occured during configuration: %s" % e)
+                    sys.exit(3)
+                defaults[option.dest] = val
+        return defaults
+
+    def get_config_section(self, name):
+        """Get a section of a configuration"""
+        if self.config.has_section(name):
+            return self.config.items(name)
+        return []
+
+    def get_environ_vars(self, prefix='PIP_'):
+        """Returns a generator with all environmental vars with prefix PIP_"""
+        for key, val in os.environ.iteritems():
+            if key.startswith(prefix):
+                yield (key.replace(prefix, '').lower(), val)
+
+    def get_default_values(self):
+        """Overriding to make updating the defaults after instantiation of
+        the option parser possible, update_defaults() does the dirty work."""
+        if not self.process_default_values:
+            # Old, pre-Optik 1.5 behaviour.
+            return optparse.Values(self.defaults)
+
+        defaults = self.update_defaults(self.defaults.copy()) # ours
+        for option in self._get_all_options():
+            default = defaults.get(option.dest)
+            if isinstance(default, basestring):
+                opt_str = option.get_opt_string()
+                defaults[option.dest] = option.check_value(opt_str, default)
+        return optparse.Values(defaults)
+
+try:
+    pip_dist = pkg_resources.get_distribution('pip')
+    version = '%s from %s (python %s)' % (
+        pip_dist, pip_dist.location, sys.version[:3])
+except pkg_resources.DistributionNotFound:
+    # when running pip.py without installing
+    version=None
+
+def rmtree_errorhandler(func, path, exc_info):
+    """On Windows, the files in .svn are read-only, so when rmtree() tries to
+    remove them, an exception is thrown.  We catch that here, remove the
+    read-only attribute, and hopefully continue without problems."""
+    exctype, value = exc_info[:2]
+    # looking for a Windows error
+    if exctype is not WindowsError or 'Access is denied' not in str(value):
+        raise
+    # file type should currently be read only
+    if ((os.stat(path).st_mode & stat.S_IREAD) != stat.S_IREAD):
+        raise
+    # convert to read/write
+    os.chmod(path, stat.S_IWRITE)
+    # use the original function to repeat the operation
+    func(path)
+
+class VcsSupport(object):
+    _registry = {}
+    schemes = ['ssh', 'git', 'hg', 'bzr', 'sftp']
+
+    def __init__(self):
+        # Register more schemes with urlparse for various version control systems
+        urlparse.uses_netloc.extend(self.schemes)
+        urlparse.uses_fragment.extend(self.schemes)
+        super(VcsSupport, self).__init__()
+
+    def __iter__(self):
+        return self._registry.__iter__()
+
+    @property
+    def backends(self):
+        return self._registry.values()
+
+    @property
+    def dirnames(self):
+        return [backend.dirname for backend in self.backends]
+
+    @property
+    def all_schemes(self):
+        schemes = []
+        for backend in self.backends:
+            schemes.extend(backend.schemes)
+        return schemes
+
+    def register(self, cls):
+        if not hasattr(cls, 'name'):
+            logger.warn('Cannot register VCS %s' % cls.__name__)
+            return
+        if cls.name not in self._registry:
+            self._registry[cls.name] = cls
+
+    def unregister(self, cls=None, name=None):
+        if name in self._registry:
+            del self._registry[name]
+        elif cls in self._registry.values():
+            del self._registry[cls.name]
+        else:
+            logger.warn('Cannot unregister because no class or name given')
+
+    def get_backend_name(self, location):
+        """
+        Return the name of the version control backend if found at given
+        location, e.g. vcs.get_backend_name('/path/to/vcs/checkout')
+        """
+        for vc_type in self._registry.values():
+            path = os.path.join(location, vc_type.dirname)
+            if os.path.exists(path):
+                return vc_type.name
+        return None
+
+    def get_backend(self, name):
+        name = name.lower()
+        if name in self._registry:
+            return self._registry[name]
+
+    def get_backend_from_location(self, location):
+        vc_type = self.get_backend_name(location)
+        if vc_type:
+            return self.get_backend(vc_type)
+        return None
+
+vcs = VcsSupport()
+
+parser = ConfigOptionParser(
+    usage='%prog COMMAND [OPTIONS]',
+    version=version,
+    add_help_option=False,
+    formatter=UpdatingDefaultsHelpFormatter(),
+    name='global')
+
+parser.add_option(
+    '-h', '--help',
+    dest='help',
+    action='store_true',
+    help='Show help')
+parser.add_option(
+    '-E', '--environment',
+    dest='venv',
+    metavar='DIR',
+    help='virtualenv environment to run pip in (either give the '
+    'interpreter or the environment base directory)')
+parser.add_option(
+    '-s', '--enable-site-packages',
+    dest='site_packages',
+    action='store_true',
+    help='Include site-packages in virtualenv if one is to be '
+    'created. Ignored if --environment is not used or '
+    'the virtualenv already exists.')
+parser.add_option(
+    # Defines a default root directory for virtualenvs, relative
+    # virtualenvs names/paths are considered relative to it.
+    '--virtualenv-base',
+    dest='venv_base',
+    type='str',
+    default='',
+    help=optparse.SUPPRESS_HELP)
+parser.add_option(
+    # Run only if inside a virtualenv, bail if not.
+    '--require-virtualenv', '--require-venv',
+    dest='require_venv',
+    action='store_true',
+    default=False,
+    help=optparse.SUPPRESS_HELP)
+parser.add_option(
+    # Use automatically an activated virtualenv instead of installing
+    # globally. -E will be ignored if used.
+    '--respect-virtualenv', '--respect-venv',
+    dest='respect_venv',
+    action='store_true',
+    default=False,
+    help=optparse.SUPPRESS_HELP)
+
+parser.add_option(
+    '-v', '--verbose',
+    dest='verbose',
+    action='count',
+    default=0,
+    help='Give more output')
+parser.add_option(
+    '-q', '--quiet',
+    dest='quiet',
+    action='count',
+    default=0,
+    help='Give less output')
+parser.add_option(
+    '--log',
+    dest='log',
+    metavar='FILENAME',
+    help='Log file where a complete (maximum verbosity) record will be kept')
+parser.add_option(
+    # Writes the log levels explicitly to the log
+    '--log-explicit-levels',
+    dest='log_explicit_levels',
+    action='store_true',
+    default=False,
+    help=optparse.SUPPRESS_HELP)
+parser.add_option(
+    # The default log file
+    '--local-log', '--log-file',
+    dest='log_file',
+    metavar='FILENAME',
+    default='./pip-log.txt',
+    help=optparse.SUPPRESS_HELP)
+
+parser.add_option(
+    '--proxy',
+    dest='proxy',
+    type='str',
+    default='',
+    help="Specify a proxy in the form user:passwd@proxy.server:port. "
+    "Note that the user:password@ is optional and required only if you "
+    "are behind an authenticated proxy.  If you provide "
+    "user@proxy.server:port then you will be prompted for a password.")
+parser.add_option(
+    '--timeout', '--default-timeout',
+    metavar='SECONDS',
+    dest='timeout',
+    type='float',
+    default=15,
+    help='Set the socket timeout (default %default seconds)')
+parser.add_option(
+    # The default version control system for editables, e.g. 'svn'
+    '--default-vcs',
+    dest='default_vcs',
+    type='str',
+    default='',
+    help=optparse.SUPPRESS_HELP)
+parser.add_option(
+    # A regex to be used to skip requirements
+    '--skip-requirements-regex',
+    dest='skip_requirements_regex',
+    type='str',
+    default='',
+    help=optparse.SUPPRESS_HELP)
+
+parser.disable_interspersed_args()
+
+_commands = {}
+
+class Command(object):
+    name = None
+    usage = None
+    hidden = False
+    def __init__(self):
+        assert self.name
+        self.parser = ConfigOptionParser(
+            usage=self.usage,
+            prog='%s %s' % (sys.argv[0], self.name),
+            version=parser.version,
+            formatter=UpdatingDefaultsHelpFormatter(),
+            name=self.name)
+        for option in parser.option_list:
+            if not option.dest or option.dest == 'help':
+                # -h, --version, etc
+                continue
+            self.parser.add_option(option)
+        _commands[self.name] = self
+
+    def merge_options(self, initial_options, options):
+        # Make sure we have all global options carried over
+        for attr in ['log', 'venv', 'proxy', 'venv_base', 'require_venv',
+                     'respect_venv', 'log_explicit_levels', 'log_file',
+                     'timeout', 'default_vcs', 'skip_requirements_regex']:
+            setattr(options, attr, getattr(initial_options, attr) or getattr(options, attr))
+        options.quiet += initial_options.quiet
+        options.verbose += initial_options.verbose
+
+    def main(self, complete_args, args, initial_options):
+        global logger
+        options, args = self.parser.parse_args(args)
+        self.merge_options(initial_options, options)
+
+        if options.require_venv and not options.venv:
+            # If a venv is required check if it can really be found
+            if not os.environ.get('VIRTUAL_ENV'):
+                print 'Could not find an activated virtualenv (required).'
+                sys.exit(3)
+            # Automatically install in currently activated venv if required
+            options.respect_venv = True
+
+        if args and args[-1] == '___VENV_RESTART___':
+        ## FIXME: We don't do anything with this value yet:
+            venv_location = args[-2]
+            args = args[:-2]
+            options.venv = None
+        else:
+            # If given the option to respect the activated environment
+            # check if no venv is given as a command line parameter
+            if options.respect_venv and os.environ.get('VIRTUAL_ENV'):
+                if options.venv and os.path.exists(options.venv):
+                    # Make sure command line venv and environmental are the same
+                    if (os.path.realpath(os.path.expanduser(options.venv)) !=
+                            os.path.realpath(os.environ.get('VIRTUAL_ENV'))):
+                        print ("Given virtualenv (%s) doesn't match "
+                               "currently activated virtualenv (%s)."
+                               % (options.venv, os.environ.get('VIRTUAL_ENV')))
+                        sys.exit(3)
+                else:
+                    options.venv = os.environ.get('VIRTUAL_ENV')
+                    print 'Using already activated environment %s' % options.venv
+        level = 1 # Notify
+        level += options.verbose
+        level -= options.quiet
+        level = Logger.level_for_integer(4-level)
+        complete_log = []
+        logger = Logger([(level, sys.stdout),
+                         (Logger.DEBUG, complete_log.append)])
+        if options.log_explicit_levels:
+            logger.explicit_levels = True
+        if options.venv:
+            if options.verbose > 0:
+                # The logger isn't setup yet
+                print 'Running in environment %s' % options.venv
+            site_packages=False
+            if options.site_packages:
+                site_packages=True
+            restart_in_venv(options.venv, options.venv_base, site_packages,
+                            complete_args)
+            # restart_in_venv should actually never return, but for clarity...
+            return
+        ## FIXME: not sure if this should come before or after venv restart
+        if options.log:
+            log_fp = open_logfile_append(options.log)
+            logger.consumers.append((logger.DEBUG, log_fp))
+        else:
+            log_fp = None
+
+        socket.setdefaulttimeout(options.timeout or None)
+
+        setup_proxy_handler(options.proxy)
+
+        exit = 0
+        try:
+            self.run(options, args)
+        except (InstallationError, UninstallationError), e:
+            logger.fatal(str(e))
+            logger.info('Exception information:\n%s' % format_exc())
+            exit = 1
+        except:
+            logger.fatal('Exception:\n%s' % format_exc())
+            exit = 2
+
+        if log_fp is not None:
+            log_fp.close()
+        if exit:
+            log_fn = options.log_file
+            text = '\n'.join(complete_log)
+            logger.fatal('Storing complete log in %s' % log_fn)
+            log_fp = open_logfile_append(log_fn)
+            log_fp.write(text)
+            log_fp.close()
+        return exit
+
+class HelpCommand(Command):
+    name = 'help'
+    usage = '%prog'
+    summary = 'Show available commands'
+
+    def run(self, options, args):
+        if args:
+            ## FIXME: handle errors better here
+            command = args[0]
+            if command not in _commands:
+                raise InstallationError('No command with the name: %s' % command)
+            command = _commands[command]
+            command.parser.print_help()
+            return
+        parser.print_help()
+        print
+        print 'Commands available:'
+        commands = list(set(_commands.values()))
+        commands.sort(key=lambda x: x.name)
+        for command in commands:
+            if command.hidden:
+                continue
+            print '  %s: %s' % (command.name, command.summary)
+
+HelpCommand()
+
+
+class InstallCommand(Command):
+    name = 'install'
+    usage = '%prog [OPTIONS] PACKAGE_NAMES...'
+    summary = 'Install packages'
+    bundle = False
+
+    def __init__(self):
+        super(InstallCommand, self).__init__()
+        self.parser.add_option(
+            '-e', '--editable',
+            dest='editables',
+            action='append',
+            default=[],
+            metavar='VCS+REPOS_URL[@REV]#egg=PACKAGE',
+            help='Install a package directly from a checkout. Source will be checked '
+            'out into src/PACKAGE (lower-case) and installed in-place (using '
+            'setup.py develop). You can run this on an existing directory/checkout (like '
+            'pip install -e src/mycheckout). This option may be provided multiple times. '
+            'Possible values for VCS are: svn, git, hg and bzr.')
+        self.parser.add_option(
+            '-r', '--requirement',
+            dest='requirements',
+            action='append',
+            default=[],
+            metavar='FILENAME',
+            help='Install all the packages listed in the given requirements file.  '
+            'This option can be used multiple times.')
+        self.parser.add_option(
+            '-f', '--find-links',
+            dest='find_links',
+            action='append',
+            default=[],
+            metavar='URL',
+            help='URL to look for packages at')
+        self.parser.add_option(
+            '-i', '--index-url', '--pypi-url',
+            dest='index_url',
+            metavar='URL',
+            default='http://pypi.python.org/simple',
+            help='Base URL of Python Package Index (default %default)')
+        self.parser.add_option(
+            '--extra-index-url',
+            dest='extra_index_urls',
+            metavar='URL',
+            action='append',
+            default=[],
+            help='Extra URLs of package indexes to use in addition to --index-url')
+        self.parser.add_option(
+            '--no-index',
+            dest='no_index',
+            action='store_true',
+            default=False,
+            help='Ignore package index (only looking at --find-links URLs instead)')
+
+        self.parser.add_option(
+            '-b', '--build', '--build-dir', '--build-directory',
+            dest='build_dir',
+            metavar='DIR',
+            default=None,
+            help='Unpack packages into DIR (default %s) and build from there' % build_prefix)
+        self.parser.add_option(
+            '-d', '--download', '--download-dir', '--download-directory',
+            dest='download_dir',
+            metavar='DIR',
+            default=None,
+            help='Download packages into DIR instead of installing them')
+        self.parser.add_option(
+            '--download-cache',
+            dest='download_cache',
+            metavar='DIR',
+            default=None,
+            help='Cache downloaded packages in DIR')
+        self.parser.add_option(
+            '--src', '--source', '--source-dir', '--source-directory',
+            dest='src_dir',
+            metavar='DIR',
+            default=None,
+            help='Check out --editable packages into DIR (default %s)' % src_prefix)
+
+        self.parser.add_option(
+            '-U', '--upgrade',
+            dest='upgrade',
+            action='store_true',
+            help='Upgrade all packages to the newest available version')
+        self.parser.add_option(
+            '-I', '--ignore-installed',
+            dest='ignore_installed',
+            action='store_true',
+            help='Ignore the installed packages (reinstalling instead)')
+        self.parser.add_option(
+            '--no-deps', '--no-dependencies',
+            dest='ignore_dependencies',
+            action='store_true',
+            default=False,
+            help='Ignore package dependencies')
+        self.parser.add_option(
+            '--no-install',
+            dest='no_install',
+            action='store_true',
+            help="Download and unpack all packages, but don't actually install them")
+
+        self.parser.add_option(
+            '--install-option',
+            dest='install_options',
+            action='append',
+            help="Extra arguments to be supplied to the setup.py install "
+            "command (use like --install-option=\"--install-scripts=/usr/local/bin\").  "
+            "Use multiple --install-option options to pass multiple options to setup.py install.  "
+            "If you are using an option with a directory path, be sure to use absolute path.")
+
+    def run(self, options, args):
+        if not options.build_dir:
+            options.build_dir = build_prefix
+        if not options.src_dir:
+            options.src_dir = src_prefix
+        if options.download_dir:
+            options.no_install = True
+            options.ignore_installed = True
+        else:
+            options.build_dir = os.path.abspath(options.build_dir)
+            options.src_dir = os.path.abspath(options.src_dir)
+        install_options = options.install_options or []
+        index_urls = [options.index_url] + options.extra_index_urls
+        if options.no_index:
+            logger.notify('Ignoring indexes: %s' % ','.join(index_urls))
+            index_urls = []
+        finder = PackageFinder(
+            find_links=options.find_links,
+            index_urls=index_urls)
+        requirement_set = RequirementSet(
+            build_dir=options.build_dir,
+            src_dir=options.src_dir,
+            download_dir=options.download_dir,
+            download_cache=options.download_cache,
+            upgrade=options.upgrade,
+            ignore_installed=options.ignore_installed,
+            ignore_dependencies=options.ignore_dependencies)
+        for name in args:
+            requirement_set.add_requirement(
+                InstallRequirement.from_line(name, None))
+        for name in options.editables:
+            requirement_set.add_requirement(
+                InstallRequirement.from_editable(name, default_vcs=options.default_vcs))
+        for filename in options.requirements:
+            for req in parse_requirements(filename, finder=finder, options=options):
+                requirement_set.add_requirement(req)
+        requirement_set.install_files(finder, force_root_egg_info=self.bundle)
+        if not options.no_install and not self.bundle:
+            requirement_set.install(install_options)
+            installed = ' '.join([req.name for req in
+                                  requirement_set.successfully_installed])
+            if installed:
+                logger.notify('Successfully installed %s' % installed)
+        elif not self.bundle:
+            downloaded = ' '.join([req.name for req in
+                                   requirement_set.successfully_downloaded])
+            if downloaded:
+                logger.notify('Successfully downloaded %s' % downloaded)
+        return requirement_set
+
+InstallCommand()
+
+class UninstallCommand(Command):
+    name = 'uninstall'
+    usage = '%prog [OPTIONS] PACKAGE_NAMES ...'
+    summary = 'Uninstall packages'
+
+    def __init__(self):
+        super(UninstallCommand, self).__init__()
+        self.parser.add_option(
+            '-r', '--requirement',
+            dest='requirements',
+            action='append',
+            default=[],
+            metavar='FILENAME',
+            help='Uninstall all the packages listed in the given requirements file.  '
+            'This option can be used multiple times.')
+        self.parser.add_option(
+            '-y', '--yes',
+            dest='yes',
+            action='store_true',
+            help="Don't ask for confirmation of uninstall deletions.")
+
+    def run(self, options, args):
+        requirement_set = RequirementSet(
+            build_dir=None,
+            src_dir=None,
+            download_dir=None)
+        for name in args:
+            requirement_set.add_requirement(
+                InstallRequirement.from_line(name))
+        for filename in options.requirements:
+            for req in parse_requirements(filename, options=options):
+                requirement_set.add_requirement(req)
+        requirement_set.uninstall(auto_confirm=options.yes)
+
+UninstallCommand()
+
+class BundleCommand(InstallCommand):
+    name = 'bundle'
+    usage = '%prog [OPTIONS] BUNDLE_NAME.pybundle PACKAGE_NAMES...'
+    summary = 'Create pybundles (archives containing multiple packages)'
+    bundle = True
+
+    def __init__(self):
+        super(BundleCommand, self).__init__()
+
+    def run(self, options, args):
+        if not args:
+            raise InstallationError('You must give a bundle filename')
+        if not options.build_dir:
+            options.build_dir = backup_dir(build_prefix, '-bundle')
+        if not options.src_dir:
+            options.src_dir = backup_dir(src_prefix, '-bundle')
+        # We have to get everything when creating a bundle:
+        options.ignore_installed = True
+        logger.notify('Putting temporary build files in %s and source/develop files in %s'
+                      % (display_path(options.build_dir), display_path(options.src_dir)))
+        bundle_filename = args[0]
+        args = args[1:]
+        requirement_set = super(BundleCommand, self).run(options, args)
+        # FIXME: here it has to do something
+        requirement_set.create_bundle(bundle_filename)
+        logger.notify('Created bundle in %s' % bundle_filename)
+        return requirement_set
+
+BundleCommand()
+
+
+class FreezeCommand(Command):
+    name = 'freeze'
+    usage = '%prog [OPTIONS]'
+    summary = 'Output all currently installed packages (exact versions) to stdout'
+
+    def __init__(self):
+        super(FreezeCommand, self).__init__()
+        self.parser.add_option(
+            '-r', '--requirement',
+            dest='requirement',
+            action='store',
+            default=None,
+            metavar='FILENAME',
+            help='Use the given requirements file as a hint about how to generate the new frozen requirements')
+        self.parser.add_option(
+            '-f', '--find-links',
+            dest='find_links',
+            action='append',
+            default=[],
+            metavar='URL',
+            help='URL for finding packages, which will be added to the frozen requirements file')
+
+    def run(self, options, args):
+        requirement = options.requirement
+        find_links = options.find_links or []
+        ## FIXME: Obviously this should be settable:
+        find_tags = False
+        skip_match = None
+
+        skip_regex = options.skip_requirements_regex
+        if skip_regex:
+            skip_match = re.compile(skip_regex)
+
+        logger.move_stdout_to_stderr()
+        dependency_links = []
+
+        f = sys.stdout
+
+        for dist in pkg_resources.working_set:
+            if dist.has_metadata('dependency_links.txt'):
+                dependency_links.extend(dist.get_metadata_lines('dependency_links.txt'))
+        for link in find_links:
+            if '#egg=' in link:
+                dependency_links.append(link)
+        for link in find_links:
+            f.write('-f %s\n' % link)
+        installations = {}
+        for dist in pkg_resources.working_set:
+            if dist.key in ('setuptools', 'pip', 'python'):
+                ## FIXME: also skip virtualenv?
+                continue
+            req = FrozenRequirement.from_dist(dist, dependency_links, find_tags=find_tags)
+            installations[req.name] = req
+        if requirement:
+            req_f = open(requirement)
+            for line in req_f:
+                if not line.strip() or line.strip().startswith('#'):
+                    f.write(line)
+                    continue
+                if skip_match and skip_match.search(line):
+                    f.write(line)
+                    continue
+                elif line.startswith('-e') or line.startswith('--editable'):
+                    if line.startswith('-e'):
+                        line = line[2:].strip()
+                    else:
+                        line = line[len('--editable'):].strip().lstrip('=')
+                    line_req = InstallRequirement.from_editable(line, default_vcs=options.default_vcs)
+                elif (line.startswith('-r') or line.startswith('--requirement')
+                      or line.startswith('-Z') or line.startswith('--always-unzip')):
+                    logger.debug('Skipping line %r' % line.strip())
+                    continue
+                else:
+                    line_req = InstallRequirement.from_line(line)
+                if not line_req.name:
+                    logger.notify("Skipping line because it's not clear what it would install: %s"
+                                  % line.strip())
+                    logger.notify("  (add #egg=PackageName to the URL to avoid this warning)")
+                    continue
+                if line_req.name not in installations:
+                    logger.warn("Requirement file contains %s, but that package is not installed"
+                                % line.strip())
+                    continue
+                f.write(str(installations[line_req.name]))
+                del installations[line_req.name]
+            f.write('## The following requirements were added by pip --freeze:\n')
+        for installation in sorted(installations.values(), key=lambda x: x.name):
+            f.write(str(installation))
+
+FreezeCommand()
+
+class ZipCommand(Command):
+    """pip 'zip' command: zip or unzip individual installed packages in
+    place on sys.path (the machinery is shared with the 'unzip' subclass)."""
+    name = 'zip'
+    usage = '%prog [OPTIONS] PACKAGE_NAMES...'
+    summary = 'Zip individual packages'
+
+    def __init__(self):
+        super(ZipCommand, self).__init__()
+        # The option spelling flips depending on whether this instance is
+        # the 'zip' or the 'unzip' command (see UnzipCommand below).
+        if self.name == 'zip':
+            self.parser.add_option(
+                '--unzip',
+                action='store_true',
+                dest='unzip',
+                help='Unzip (rather than zip) a package')
+        else:
+            self.parser.add_option(
+                '--zip',
+                action='store_false',
+                dest='unzip',
+                default=True,
+                help='Zip (rather than unzip) a package')
+        self.parser.add_option(
+            '--no-pyc',
+            action='store_true',
+            dest='no_pyc',
+            help='Do not include .pyc files in zip files (useful on Google App Engine)')
+        self.parser.add_option(
+            '-l', '--list',
+            action='store_true',
+            dest='list',
+            help='List the packages available, and their zip status')
+        self.parser.add_option(
+            '--sort-files',
+            action='store_true',
+            dest='sort_files',
+            help='With --list, sort packages according to how many files they contain')
+        self.parser.add_option(
+            '--path',
+            action='append',
+            dest='paths',
+            help='Restrict operations to the given paths (may include wildcards)')
+        self.parser.add_option(
+            '-n', '--simulate',
+            action='store_true',
+            help='Do not actually perform the zip/unzip operation')
+
+    def paths(self):
+        """All the entries of sys.path, possibly restricted by --path"""
+        if not self.select_paths:
+            return sys.path
+        result = []
+        match_any = set()
+        for path in sys.path:
+            path = os.path.normcase(os.path.abspath(path))
+            for match in self.select_paths:
+                match = os.path.normcase(os.path.abspath(match))
+                if '*' in match:
+                    # Wildcard --path value: fnmatch-translate into a regex;
+                    # the appended '*' makes it a prefix match.
+                    if re.search(fnmatch.translate(match+'*'), path):
+                        result.append(path)
+                        match_any.add(match)
+                        break
+                else:
+                    # Literal --path value: plain prefix match.
+                    if path.startswith(match):
+                        result.append(path)
+                        match_any.add(match)
+                        break
+            else:
+                logger.debug("Skipping path %s because it doesn't match %s"
+                             % (path, ', '.join(self.select_paths)))
+        # Non-wildcard --path values that matched nothing on sys.path are
+        # still honored as explicit extra locations.
+        for match in self.select_paths:
+            if match not in match_any and '*' not in match:
+                result.append(match)
+                logger.debug("Adding path %s because it doesn't match anything already on sys.path"
+                             % match)
+        return result
+
+    def run(self, options, args):
+        """Entry point: validate every named package up front, then dispatch
+        each one to zip_package/unzip_package (or to list() for --list)."""
+        self.select_paths = options.paths
+        self.simulate = options.simulate
+        if options.list:
+            return self.list(options, args)
+        if not args:
+            raise InstallationError(
+                'You must give at least one package to zip or unzip')
+        packages = []
+        # Resolve and sanity-check all packages before touching any of them.
+        for arg in args:
+            module_name, filename = self.find_package(arg)
+            if options.unzip and os.path.isdir(filename):
+                raise InstallationError(
+                    'The module %s (in %s) is not a zip file; cannot be unzipped'
+                    % (module_name, filename))
+            elif not options.unzip and not os.path.isdir(filename):
+                raise InstallationError(
+                    'The module %s (in %s) is not a directory; cannot be zipped'
+                    % (module_name, filename))
+            packages.append((module_name, filename))
+        last_status = None
+        for module_name, filename in packages:
+            if options.unzip:
+                last_status = self.unzip_package(module_name, filename)
+            else:
+                last_status = self.zip_package(module_name, filename, options.no_pyc)
+        return last_status
+
+    def unzip_package(self, module_name, filename):
+        """Extract module_name's entries from the zip containing it, then
+        rewrite the zip with only the remaining entries (or delete it and
+        its .pth reference if nothing remains)."""
+        zip_filename = os.path.dirname(filename)
+        # NOTE(review): this condition looks inverted -- as written it only
+        # raises when the parent is NOT a file AND IS a zipfile, which can
+        # never both hold, so the error never fires; presumably it was meant
+        # to be `not (os.path.isfile(...) and zipfile.is_zipfile(...))`.
+        if not os.path.isfile(zip_filename) and zipfile.is_zipfile(zip_filename):
+            raise InstallationError(
+                'Module %s (in %s) isn\'t located in a zip file in %s'
+                % (module_name, filename, zip_filename))
+        package_path = os.path.dirname(zip_filename)
+        if not package_path in self.paths():
+            logger.warn(
+                'Unpacking %s into %s, but %s is not on sys.path'
+                % (display_path(zip_filename), display_path(package_path),
+                   display_path(package_path)))
+        logger.notify('Unzipping %s (in %s)' % (module_name, display_path(zip_filename)))
+        if self.simulate:
+            logger.notify('Skipping remaining operations because of --simulate')
+            return
+        logger.indent += 2
+        try:
+            ## FIXME: this should be undoable:
+            zip = zipfile.ZipFile(zip_filename)
+            to_save = []
+            for name in zip.namelist():
+                if name.startswith('%s/' % module_name):
+                    # Entry belongs to the requested package: extract to disk.
+                    content = zip.read(name)
+                    dest = os.path.join(package_path, name)
+                    if not os.path.exists(os.path.dirname(dest)):
+                        os.makedirs(os.path.dirname(dest))
+                    if not content and dest.endswith('/'):
+                        # Empty content + trailing slash marks a directory entry.
+                        if not os.path.exists(dest):
+                            os.makedirs(dest)
+                    else:
+                        f = open(dest, 'wb')
+                        f.write(content)
+                        f.close()
+                else:
+                    # Entry belongs to another package: keep it for the rewrite.
+                    to_save.append((name, zip.read(name)))
+            zip.close()
+            if not to_save:
+                logger.info('Removing now-empty zip file %s' % display_path(zip_filename))
+                os.unlink(zip_filename)
+                self.remove_filename_from_pth(zip_filename)
+            else:
+                logger.info('Removing entries in %s/ from zip file %s' % (module_name, display_path(zip_filename)))
+                zip = zipfile.ZipFile(zip_filename, 'w')
+                for name, content in to_save:
+                    zip.writestr(name, content)
+                zip.close()
+        finally:
+            logger.indent -= 2
+
+    def zip_package(self, module_name, filename, no_pyc):
+        """Zip the package directory `filename` into <dir>.zip (in place for
+        a .egg directory), remove the directory, and add a .pth entry so the
+        zip stays importable."""
+        orig_filename = filename
+        logger.notify('Zip %s (in %s)' % (module_name, display_path(filename)))
+        logger.indent += 2
+        if filename.endswith('.egg'):
+            dest_filename = filename
+        else:
+            dest_filename = filename + '.zip'
+        try:
+            ## FIXME: I think this needs to be undoable:
+            if filename == dest_filename:
+                # Zipping a .egg in place: move the directory aside first so
+                # the zip can take its name.
+                filename = backup_dir(orig_filename)
+                logger.notify('Moving %s aside to %s' % (orig_filename, filename))
+                if not self.simulate:
+                    shutil.move(orig_filename, filename)
+            try:
+                logger.info('Creating zip file in %s' % display_path(dest_filename))
+                if not self.simulate:
+                    zip = zipfile.ZipFile(dest_filename, 'w')
+                    zip.writestr(module_name + '/', '')
+                    for dirpath, dirnames, filenames in os.walk(filename):
+                        if no_pyc:
+                            filenames = [f for f in filenames
+                                         if not f.lower().endswith('.pyc')]
+                        for fns, is_dir in [(dirnames, True), (filenames, False)]:
+                            for fn in fns:
+                                full = os.path.join(dirpath, fn)
+                                # Archive name: module_name/<path relative to
+                                # the directory being zipped>.
+                                dest = os.path.join(module_name, dirpath[len(filename):].lstrip(os.path.sep), fn)
+                                if is_dir:
+                                    zip.writestr(dest+'/', '')
+                                else:
+                                    zip.write(full, dest)
+                    zip.close()
+                logger.info('Removing old directory %s' % display_path(filename))
+                if not self.simulate:
+                    shutil.rmtree(filename)
+            except:
+                ## FIXME: need to do an undo here
+                raise
+            ## FIXME: should also be undone:
+            self.add_filename_to_pth(dest_filename)
+        finally:
+            logger.indent -= 2
+
+    def remove_filename_from_pth(self, filename):
+        """Drop `filename` from the first .pth file that references it; if
+        that leaves the .pth file empty, delete the file instead."""
+        for pth in self.pth_files():
+            f = open(pth, 'r')
+            lines = f.readlines()
+            f.close()
+            new_lines = [
+                l for l in lines if l.strip() != filename]
+            if lines != new_lines:
+                logger.info('Removing reference to %s from .pth file %s'
+                            % (display_path(filename), display_path(pth)))
+                if not filter(None, new_lines):
+                    logger.info('%s file would be empty: deleting' % display_path(pth))
+                    if not self.simulate:
+                        os.unlink(pth)
+                else:
+                    if not self.simulate:
+                        f = open(pth, 'w')
+                        f.writelines(new_lines)
+                        f.close()
+                # Only the first matching .pth file is touched.
+                return
+        logger.warn('Cannot find a reference to %s in any .pth file' % display_path(filename))
+
+    def add_filename_to_pth(self, filename):
+        """Append `filename` to a sibling <filename>.pth file (creating it
+        if missing) so the new zip is picked up from sys.path."""
+        path = os.path.dirname(filename)
+        # NOTE(review): `filename` is an absolute path here, so
+        # os.path.join(path, filename + '.pth') just yields filename + '.pth';
+        # the join with `path` is effectively a no-op -- confirm intent.
+        dest = os.path.join(path, filename + '.pth')
+        if path not in self.paths():
+            logger.warn('Adding .pth file %s, but it is not on sys.path' % display_path(dest))
+        if not self.simulate:
+            if os.path.exists(dest):
+                f = open(dest)
+                lines = f.readlines()
+                f.close()
+                # Make sure the previous last line is newline-terminated
+                # before appending.
+                if lines and not lines[-1].endswith('\n'):
+                    lines[-1] += '\n'
+                lines.append(filename+'\n')
+            else:
+                lines = [filename + '\n']
+            f = open(dest, 'w')
+            f.writelines(lines)
+            f.close()
+
+    def pth_files(self):
+        """Yield every .pth file in the (possibly --path-restricted) paths."""
+        for path in self.paths():
+            if not os.path.exists(path) or not os.path.isdir(path):
+                continue
+            for filename in os.listdir(path):
+                if filename.endswith('.pth'):
+                    yield os.path.join(path, filename)
+
+    def find_package(self, package):
+        """Return (package, full_path) for the first hit on the search
+        paths -- either an existing file/directory, or a member of a path
+        entry that is itself a zip file (zipped egg)."""
+        for path in self.paths():
+            full = os.path.join(path, package)
+            if os.path.exists(full):
+                return package, full
+            if not os.path.isdir(path) and zipfile.is_zipfile(path):
+                zip = zipfile.ZipFile(path, 'r')
+                try:
+                    # Probe for the package inside the zip; ZipFile.read
+                    # raises KeyError for a missing member.
+                    zip.read('%s/__init__.py' % package)
+                except KeyError:
+                    pass
+                else:
+                    zip.close()
+                    return package, full
+                zip.close()
+        ## FIXME: need special error for package.py case:
+        raise InstallationError(
+            'No package with the name %s found' % package)
+
+    def list(self, options, args):
+        """Implements --list: report zipped and unzipped packages for each
+        site-packages-like directory on the search paths."""
+        if args:
+            raise InstallationError(
+                'You cannot give an argument with --list')
+        for path in sorted(self.paths()):
+            if not os.path.exists(path):
+                continue
+            basename = os.path.basename(path.rstrip(os.path.sep))
+            if os.path.isfile(path) and zipfile.is_zipfile(path):
+                if os.path.dirname(path) not in self.paths():
+                    logger.notify('Zipped egg: %s' % display_path(path))
+                continue
+            # Only report site-packages-style directories.
+            if (basename != 'site-packages'
+                and not path.replace('\\', '/').endswith('lib/python')):
+                continue
+            logger.notify('In %s:' % display_path(path))
+            logger.indent += 2
+            zipped = []
+            unzipped = []
+            try:
+                for filename in sorted(os.listdir(path)):
+                    ext = os.path.splitext(filename)[1].lower()
+                    if ext in ('.pth', '.egg-info', '.egg-link'):
+                        continue
+                    if ext == '.py':
+                        logger.info('Not displaying %s: not a package' % display_path(filename))
+                        continue
+                    full = os.path.join(path, filename)
+                    if os.path.isdir(full):
+                        unzipped.append((filename, self.count_package(full)))
+                    elif zipfile.is_zipfile(full):
+                        zipped.append(filename)
+                    else:
+                        logger.info('Unknown file: %s' % display_path(filename))
+                if zipped:
+                    logger.notify('Zipped packages:')
+                    logger.indent += 2
+                    try:
+                        for filename in zipped:
+                            logger.notify(filename)
+                    finally:
+                        logger.indent -= 2
+                else:
+                    logger.notify('No zipped packages.')
+                if unzipped:
+                    if options.sort_files:
+                        # Largest file count first.
+                        unzipped.sort(key=lambda x: -x[1])
+                    logger.notify('Unzipped packages:')
+                    logger.indent += 2
+                    try:
+                        for filename, count in unzipped:
+                            logger.notify('%s  (%i files)' % (filename, count))
+                    finally:
+                        logger.indent -= 2
+                else:
+                    logger.notify('No unzipped packages.')
+            finally:
+                logger.indent -= 2
+
+    def count_package(self, path):
+        """Number of files under `path`, excluding .pyc files."""
+        total = 0
+        for dirpath, dirnames, filenames in os.walk(path):
+            filenames = [f for f in filenames
+                         if not f.lower().endswith('.pyc')]
+            total += len(filenames)
+        return total
+
+# Instantiation registers the command in the global command registry.
+ZipCommand()
+
+class UnzipCommand(ZipCommand):
+    """pip 'unzip' command: same machinery as ZipCommand, whose __init__
+    branches on self.name to flip the --zip/--unzip option spelling."""
+    name = 'unzip'
+    summary = 'Unzip individual packages'
+
+UnzipCommand()
+
+# Wrapper that brackets the emitted shell snippet with marker comments, so
+# a previously-installed block can be located in a user's rc file.
+BASE_COMPLETION = """
+# pip %(shell)s completion start%(script)s# pip %(shell)s completion end
+"""
+
+# Shell-specific completion bodies, keyed by shell name.  Each script makes
+# the shell re-invoke pip with PIP_AUTO_COMPLETE=1 plus COMP_WORDS/COMP_CWORD
+# in the environment (consumed by autocomplete() below).
+# NOTE(review): in the zsh script the first `reply=(` line ends with a
+# backslash followed by a trailing space; in the generated script that space
+# defeats the line continuation -- confirm and strip the trailing space.
+COMPLETION_SCRIPTS = {
+    'bash': """
+_pip_completion()
+{
+    COMPREPLY=( $( COMP_WORDS="${COMP_WORDS[*]}" \\
+                   COMP_CWORD=$COMP_CWORD \\
+                   PIP_AUTO_COMPLETE=1 $1 ) )
+}
+complete -o default -F _pip_completion pip
+""", 'zsh': """
+function _pip_completion {
+  local words cword
+  read -Ac words
+  read -cn cword
+  reply=( $( COMP_WORDS="$words[*]" \\ 
+             COMP_CWORD=$(( cword-1 )) \\
+             PIP_AUTO_COMPLETE=1 $words[1] ) )
+}
+compctl -K _pip_completion pip
+"""
+}
+
+class CompletionCommand(Command):
+    """pip 'completion' helper: prints shell completion code for bash or
+    zsh.  Hidden, so it does not show up in the command listing."""
+    name = 'completion'
+    summary = 'A helper command to be used for command completion'
+    hidden = True
+
+    def __init__(self):
+        super(CompletionCommand, self).__init__()
+        # Both flags store a constant into the single 'shell' destination;
+        # the last one given wins.
+        self.parser.add_option(
+            '--bash', '-b',
+            action='store_const',
+            const='bash',
+            dest='shell',
+            help='Emit completion code for bash')
+        self.parser.add_option(
+            '--zsh', '-z',
+            action='store_const',
+            const='zsh',
+            dest='shell',
+            help='Emit completion code for zsh')
+
+    def run(self, options, args):
+        """Prints the completion code of the given shell"""
+        if options.shell in ('bash', 'zsh'):
+            script = COMPLETION_SCRIPTS.get(options.shell, '')
+            print BASE_COMPLETION % {'script': script, 'shell': options.shell}
+        else:
+            print 'ERROR: You must pass --bash or --zsh'
+
+# Instantiation registers the command in the global command registry.
+CompletionCommand()
+
+def autocomplete():
+    """Command and option completion for the main option parser (and options)
+    and its subcommands (and options).
+
+    Enable by sourcing one of the completion shell scripts (bash or zsh).
+    """
+    # Don't complete if user hasn't sourced bash_completion file.
+    if not os.environ.has_key('PIP_AUTO_COMPLETE'):
+        return
+    # COMP_WORDS/COMP_CWORD are exported by the COMPLETION_SCRIPTS snippets;
+    # the first word (the 'pip' executable itself) is dropped.
+    cwords = os.environ['COMP_WORDS'].split()[1:]
+    cword = int(os.environ['COMP_CWORD'])
+    try:
+        current = cwords[cword-1]
+    except IndexError:
+        current = ''
+    subcommands = [cmd for cmd, cls in _commands.items() if not cls.hidden]
+    options = []
+    # subcommand
+    if cword == 1:
+        # show options of main parser only when necessary
+        if current.startswith('-') or current.startswith('--'):
+            subcommands += [opt.get_opt_string()
+                            for opt in parser.option_list
+                            if opt.help != optparse.SUPPRESS_HELP]
+        print ' '.join(filter(lambda x: x.startswith(current), subcommands))
+    # subcommand options
+    # special case: the 'help' subcommand has no options
+    elif cwords[0] in subcommands and cwords[0] != 'help':
+        subcommand = _commands.get(cwords[0])
+        # (opt_string, nargs) pairs; nargs > 0 means the option takes a value.
+        options += [(opt.get_opt_string(), opt.nargs)
+                    for opt in subcommand.parser.option_list
+                    if opt.help != optparse.SUPPRESS_HELP]
+        # filter out previously specified options from available options
+        prev_opts = [x.split('=')[0] for x in cwords[1:cword-1]]
+        options = filter(lambda (x, v): x not in prev_opts, options)
+        # filter options by current input
+        options = [(k, v) for k, v in options if k.startswith(current)]
+        for option in options:
+            opt_label = option[0]
+            # append '=' to options which require args
+            if option[1]:
+                opt_label += '='
+            print opt_label
+    # NOTE(review): always exits non-zero, even after printing completions;
+    # the sourcing shell ignores the status, but 0 would be conventional.
+    sys.exit(1)
+
+def main(initial_args=None):
+    """CLI entry point: run the autocomplete hook, parse global options,
+    and dispatch to the named subcommand's main()."""
+    if initial_args is None:
+        initial_args = sys.argv[1:]
+    # autocomplete() exits the process when PIP_AUTO_COMPLETE is set;
+    # otherwise it returns immediately.
+    autocomplete()
+    options, args = parser.parse_args(initial_args)
+    if options.help and not args:
+        args = ['help']
+    if not args:
+        # NOTE(review): message reads 'use "pip help" see a list' -- missing
+        # the word 'to'; it is a runtime string, so only flagged here.
+        parser.error('You must give a command (use "pip help" see a list of commands)')
+    command = args[0].lower()
+    ## FIXME: search for a command match?
+    if command not in _commands:
+        parser.error('No command by the name %(script)s %(arg)s\n  (maybe you meant "%(script)s install %(arg)s")'
+                     % dict(script=os.path.basename(sys.argv[0]), arg=command))
+    command = _commands[command]
+    return command.main(initial_args, args[1:], options)
+
+def get_proxy(proxystr=''):
+    """Get the proxy given the option passed on the command line.  If an
+    empty string is passed it looks at the HTTP_PROXY environment
+    variable.
+
+    Returns 'user:password@server:port' when credentials are involved,
+    the string unchanged when it has no '@', or None when no proxy is
+    configured at all."""
+    if not proxystr:
+        proxystr = os.environ.get('HTTP_PROXY', '')
+    if proxystr:
+        if '@' in proxystr:
+            user_password, server_port = proxystr.split('@', 1)
+            if ':' in user_password:
+                user, password = user_password.split(':', 1)
+            else:
+                user = user_password
+                # No password supplied: prompt interactively and URL-quote
+                # the answer so it can be embedded in the proxy URL.
+                import getpass
+                prompt = 'Password for %s@%s: ' % (user, server_port)
+                password = urllib.quote(getpass.getpass(prompt))
+            return '%s:%s@%s' % (user, password, server_port)
+        else:
+            return proxystr
+    else:
+        return None
+
+def setup_proxy_handler(proxystr=''):
+    """Set the proxy handler given the option passed on the command
+    line.  If an empty string is passed it looks at the HTTP_PROXY
+    environment variable.  """
+    proxy = get_proxy(proxystr)
+    if proxy:
+        # Install a process-global urllib2 opener routing both http and ftp
+        # through the proxy.
+        proxy_support = urllib2.ProxyHandler({"http": proxy, "ftp": proxy})
+        opener = urllib2.build_opener(proxy_support, urllib2.CacheFTPHandler)
+        urllib2.install_opener(opener)
+
+def format_exc(exc_info=None):
+    """Format the given (or, by default, the current) exception info as a
+    traceback string.  Uses the StringIO imported at module level."""
+    if exc_info is None:
+        exc_info = sys.exc_info()
+    out = StringIO()
+    traceback.print_exception(*exc_info, **dict(file=out))
+    return out.getvalue()
+
+def restart_in_venv(venv, base, site_packages, args):
+    """
+    Restart this script using the interpreter in the given virtual environment
+    """
+    if base and not os.path.isabs(venv) and not venv.startswith('~'):
+        base = os.path.expanduser(base)
+        # ensure we have an abs basepath at this point:
+        #    a relative one makes no sense (or does it?)
+        if os.path.isabs(base):
+            venv = os.path.join(base, venv)
+
+    if venv.startswith('~'):
+        venv = os.path.expanduser(venv)
+
+    if not os.path.exists(venv):
+        # Environment missing: create it on the fly (requires virtualenv).
+        try:
+            import virtualenv
+        except ImportError:
+            print 'The virtual environment does not exist: %s' % venv
+            print 'and virtualenv is not installed, so a new environment cannot be created'
+            sys.exit(3)
+        print 'Creating new virtualenv environment in %s' % venv
+        virtualenv.logger = logger
+        # NOTE(review): indent is bumped here but never restored.
+        logger.indent += 2
+        virtualenv.create_environment(venv, site_packages=site_packages)
+    if sys.platform == 'win32':
+        python = os.path.join(venv, 'Scripts', 'python.exe')
+        # check for bin directory which is used in buildouts
+        if not os.path.exists(python):
+            python = os.path.join(venv, 'bin', 'python.exe')
+    else:
+        python = os.path.join(venv, 'bin', 'python')
+    if not os.path.exists(python):
+        python = venv
+    if not os.path.exists(python):
+        raise BadCommand('Cannot find virtual environment interpreter at %s' % python)
+    base = os.path.dirname(os.path.dirname(python))
+    # `file` shadows the builtin; points at this script's source (strip the
+    # trailing 'c' of a .pyc path).
+    file = __file__
+    if file.endswith('.pyc'):
+        file = file[:-1]
+    # Re-exec under the venv's interpreter; the trailing '___VENV_RESTART___'
+    # sentinel presumably tells the restarted process it is already inside
+    # the venv -- confirm against the argument handling elsewhere in the file.
+    proc = subprocess.Popen(
+        [python, file] + args + [base, '___VENV_RESTART___'])
+    proc.wait()
+    sys.exit(proc.returncode)
+
+class PackageFinder(object):
+    """This finds packages.
+
+    This is meant to match easy_install's technique for looking for
+    packages, by reading pages and looking for appropriate links
+    """
+
+    failure_limit = 3
+
+    def __init__(self, find_links, index_urls):
+        # find_links: extra --find-links locations searched alongside the
+        # indexes; index_urls: the package index base URLs (PyPI-style).
+        self.find_links = find_links
+        self.index_urls = index_urls
+        self.dependency_links = []
+        self.cache = PageCache()
+        # These are boring links that have already been logged somehow:
+        self.logged_links = set()
+
+    def add_dependency_links(self, links):
+        """Append a package's dependency_links URLs to the search list."""
+        ## FIXME: this shouldn't be global list this, it should only
+        ## apply to requirements of the package that specifies the
+        ## dependency_links value
+        ## FIXME: also, we should track comes_from (i.e., use Link)
+        self.dependency_links.extend(links)
+
+    def find_requirement(self, req, upgrade):
+        """Return a Link to the best distribution satisfying `req`, or None
+        when the currently-installed version is already the best match.
+
+        Raises DistributionNotFound when neither a download nor the
+        installed version satisfies the requirement."""
+        url_name = req.url_name
+        # Only check main index if index URL is given:
+        main_index_url = None
+        if self.index_urls:
+            # Check that we have the url_name correctly spelled:
+            main_index_url = Link(posixpath.join(self.index_urls[0], url_name))
+            # This will also cache the page, so it's okay that we get it again later:
+            page = self._get_page(main_index_url, req)
+            if page is None:
+                url_name = self._find_url_name(Link(self.index_urls[0]), url_name, req) or req.url_name
+        def mkurl_pypi_url(url):
+            loc =  posixpath.join(url, url_name)
+            # For maximum compatibility with easy_install, ensure the path
+            # ends in a trailing slash.  Although this isn't in the spec
+            # (and PyPI can handle it without the slash) some other index
+            # implementations might break if they relied on easy_install's behavior.
+            if not loc.endswith('/'):
+                loc = loc + '/'
+            return loc
+        if url_name is not None:
+            locations = [
+                mkurl_pypi_url(url)
+                for url in self.index_urls] + self.find_links
+        else:
+            locations = list(self.find_links)
+        locations.extend(self.dependency_links)
+        # For pinned (absolute) versions, also try the main index's
+        # version-specific page, prepended so it is searched first.
+        for version in req.absolute_versions:
+            if url_name is not None and main_index_url is not None:
+                locations = [
+                    posixpath.join(main_index_url.url, version)] + locations
+        # Split file: URLs (scanned directly on disk) from remote URLs.
+        file_locations = []
+        url_locations = []
+        for url in locations:
+            if url.startswith('file:'):
+                fn = url_to_filename(url)
+                if os.path.isdir(fn):
+                    path = os.path.realpath(fn)
+                    for item in os.listdir(path):
+                        file_locations.append(
+                            filename_to_url2(os.path.join(path, item)))
+                elif os.path.isfile(fn):
+                    file_locations.append(filename_to_url2(fn))
+            else:
+                url_locations.append(url)
+
+        locations = [Link(url) for url in url_locations]
+        logger.debug('URLs to search for versions for %s:' % req)
+        for location in locations:
+            logger.debug('* %s' % location)
+        # All *_versions lists hold (parsed_version, link, version) triples.
+        found_versions = []
+        found_versions.extend(
+            self._package_versions(
+                [Link(url, '-f') for url in self.find_links], req.name.lower()))
+        page_versions = []
+        for page in self._get_pages(locations, req):
+            logger.debug('Analyzing links from page %s' % page.url)
+            logger.indent += 2
+            try:
+                page_versions.extend(self._package_versions(page.links, req.name.lower()))
+            finally:
+                logger.indent -= 2
+        dependency_versions = list(self._package_versions(
+            [Link(url) for url in self.dependency_links], req.name.lower()))
+        if dependency_versions:
+            logger.info('dependency_links found: %s' % ', '.join([link.url for parsed, link, version in dependency_versions]))
+        file_versions = list(self._package_versions(
+                [Link(url) for url in file_locations], req.name.lower()))
+        if not found_versions and not page_versions and not dependency_versions and not file_versions:
+            logger.fatal('Could not find any downloads that satisfy the requirement %s' % req)
+            raise DistributionNotFound('No distributions at all found for %s' % req)
+        if req.satisfied_by is not None:
+            # The installed version competes too, using Inf as its "link".
+            found_versions.append((req.satisfied_by.parsed_version, Inf, req.satisfied_by.version))
+        if file_versions:
+            file_versions.sort(reverse=True)
+            logger.info('Local files found: %s' % ', '.join([url_to_filename(link.url) for parsed, link, version in file_versions]))
+            found_versions = file_versions + found_versions
+        all_versions = found_versions + page_versions + dependency_versions
+        # Keep only versions inside the requirement's specifier set;
+        # applicable_versions holds (link, version) pairs.
+        applicable_versions = []
+        for (parsed_version, link, version) in all_versions:
+            if version not in req.req:
+                logger.info("Ignoring link %s, version %s doesn't match %s"
+                            % (link, version, ','.join([''.join(s) for s in req.req.specs])))
+                continue
+            applicable_versions.append((link, version))
+        # Sort newest version first.
+        applicable_versions = sorted(applicable_versions, key=operator.itemgetter(1),
+            cmp=lambda x, y : cmp(pkg_resources.parse_version(y), pkg_resources.parse_version(x))
+        )
+        existing_applicable = bool([link for link, version in applicable_versions if link is Inf])
+        if not upgrade and existing_applicable:
+            # NOTE(review): `[0][1]` tests the version slot, but the Inf
+            # sentinel is stored in the link slot (compare the `[0][0] is Inf`
+            # check further down) -- presumably this should be
+            # applicable_versions[0][0]; confirm.
+            if applicable_versions[0][1] is Inf:
+                logger.info('Existing installed version (%s) is most up-to-date and satisfies requirement'
+                            % req.satisfied_by.version)
+            else:
+                logger.info('Existing installed version (%s) satisfies requirement (most up-to-date version is %s)'
+                            % (req.satisfied_by.version, applicable_versions[0][1]))
+            return None
+        if not applicable_versions:
+            logger.fatal('Could not find a version that satisfies the requirement %s (from versions: %s)'
+                         % (req, ', '.join([version for parsed_version, link, version in found_versions])))
+            raise DistributionNotFound('No distributions matching the version for %s' % req)
+        if applicable_versions[0][0] is Inf:
+            # We have an existing version, and its the best version
+            logger.info('Installed version (%s) is most up-to-date (past versions: %s)'
+                        % (req.satisfied_by.version, ', '.join([version for link, version in applicable_versions[1:]]) or 'none'))
+            return None
+        if len(applicable_versions) > 1:
+            logger.info('Using version %s (newest of versions: %s)' %
+                        (applicable_versions[0][1], ', '.join([version for link, version in applicable_versions])))
+        return applicable_versions[0][0]
+
+    def _find_url_name(self, index_url, url_name, req):
+        """Finds the true URL name of a package, when the given name isn't quite correct.
+        This is usually used to implement case-insensitivity.
+
+        Returns the corrected name, or None when no link on the index page
+        matches (or the index page cannot be fetched)."""
+        if not index_url.url.endswith('/'):
+            # Vaguely part of the PyPI API... weird but true.
+            ## FIXME: bad to modify this?
+            index_url.url += '/'
+        page = self._get_page(index_url, req)
+        if page is None:
+            logger.fatal('Cannot fetch index base URL %s' % index_url)
+            return
+        norm_name = normalize_name(req.url_name)
+        # Compare the normalized last path segment of every link on the page.
+        for link in page.links:
+            base = posixpath.basename(link.path.rstrip('/'))
+            if norm_name == normalize_name(base):
+                logger.notify('Real name of requirement %s is %s' % (url_name, base))
+                return base
+        return None
+
+    def _get_pages(self, locations, req):
+        """Fetch the given locations concurrently (up to 10 worker threads),
+        skipping locations that have errors; workers also follow and queue
+        each page's download/homepage rel-links.  Returns the list of
+        fetched page objects (despite the name, it does not yield)."""
+        pending_queue = Queue()
+        for location in locations:
+            pending_queue.put(location)
+        done = []
+        seen = set()
+        threads = []
+        for i in range(min(10, len(locations))):
+            t = threading.Thread(target=self._get_queued_page, args=(req, pending_queue, done, seen))
+            # Daemon threads so a hung fetch cannot block interpreter exit.
+            t.setDaemon(True)
+            threads.append(t)
+            t.start()
+        for t in threads:
+            t.join()
+        return done
+
+    # Lock for serializing output from the page-fetching worker threads.
+    # NOTE(review): not referenced in the code visible here; presumably used
+    # by _get_page -- confirm.
+    _log_lock = threading.Lock()
+
+    def _get_queued_page(self, req, pending_queue, done, seen):
+        """Thread worker for _get_pages: drain the queue, fetch each unseen
+        location, append fetched pages to `done`, and re-queue rel-links.
+
+        NOTE(review): `seen` is shared across workers without a lock; the
+        check-then-add can race and fetch the same location twice."""
+        while 1:
+            try:
+                location = pending_queue.get(False)
+            except QueueEmpty:
+                # Queue drained: this worker is finished.
+                return
+            if location in seen:
+                continue
+            seen.add(location)
+            page = self._get_page(location, req)
+            if page is None:
+                continue
+            done.append(page)
+            for link in page.rel_links():
+                pending_queue.put(link)
+
    # Matches the "#egg=Name" fragment on a link URL.
    _egg_fragment_re = re.compile(r'#egg=([^&]*)')
    # Splits an archive basename into (name, version); the version group
    # is greedy and also absorbs trailing "-pyX.Y" style suffixes.
    _egg_info_re = re.compile(r'([a-z0-9_.]+)-([a-z0-9_.-]+)', re.I)
    # Trailing "-pyX.Y" marker naming the Python version a dist targets.
    _py_version_re = re.compile(r'-py([123]\.[0-9])$')
+
+    def _sort_links(self, links):
+        "Brings links in order, non-egg links first, egg links second"
+        eggs, no_eggs = [], []
+        for link in links:
+            if link.egg_fragment:
+                eggs.append(link)
+            else:
+                no_eggs.append(link)
+        return no_eggs + eggs
+
    def _package_versions(self, links, search_name):
        """Yield (parsed_version, link, version_string) for each link
        that looks like a downloadable release of *search_name*,
        skipping non-files, unknown archive formats, other projects,
        and builds for a different Python version."""
        seen_links = {}
        for link in self._sort_links(links):
            # De-duplicate by URL
            if link.url in seen_links:
                continue
            seen_links[link.url] = None
            if link.egg_fragment:
                egg_info = link.egg_fragment
            else:
                # NOTE(review): 'path' is assigned but never used
                path = link.path
                egg_info, ext = link.splitext()
                if not ext:
                    # logged_links limits each skip message to one emission
                    if link not in self.logged_links:
                        logger.debug('Skipping link %s; not a file' % link)
                        self.logged_links.add(link)
                    continue
                if egg_info.endswith('.tar'):
                    # Special double-extension case:
                    egg_info = egg_info[:-4]
                    ext = '.tar' + ext
                if ext not in ('.tar.gz', '.tar.bz2', '.tar', '.tgz', '.zip'):
                    if link not in self.logged_links:
                        logger.debug('Skipping link %s; unknown archive format: %s' % (link, ext))
                        self.logged_links.add(link)
                    continue
            version = self._egg_info_matches(egg_info, search_name, link)
            if version is None:
                logger.debug('Skipping link %s; wrong project name (not %s)' % (link, search_name))
                continue
            # Strip a trailing -pyX.Y marker; skip links built for a
            # different Python version than the running interpreter
            match = self._py_version_re.search(version)
            if match:
                version = version[:match.start()]
                py_version = match.group(1)
                if py_version != sys.version[:3]:
                    logger.debug('Skipping %s because Python version is incorrect' % link)
                    continue
            logger.debug('Found link %s, version: %s' % (link, version))
            yield (pkg_resources.parse_version(version),
                   link,
                   version)
+
+    def _egg_info_matches(self, egg_info, search_name, link):
+        match = self._egg_info_re.search(egg_info)
+        if not match:
+            logger.debug('Could not parse version from link: %s' % link)
+            return None
+        name = match.group(0).lower()
+        # To match the "safe" name that pkg_resources creates:
+        name = name.replace('_', '-')
+        if name.startswith(search_name.lower()):
+            return match.group(0)[len(search_name):].lstrip('-')
+        else:
+            return None
+
    def _get_page(self, link, req):
        # Fetch and parse a single index page, delegating to
        # HTMLPage.get_page with this finder's download cache.
        return HTMLPage.get_page(link, req, cache=self.cache)
+
+
+class InstallRequirement(object):
+
    def __init__(self, req, comes_from, source_dir=None, editable=False,
                 url=None, update=True):
        """Create a requirement to install.

        *req* may be a pkg_resources.Requirement, a requirement string,
        or None (for URL-only requirements whose name isn't known yet).
        *comes_from* is the requirement (or string) that caused this one
        to be installed; it is used in messages (see __str__/from_path).
        """
        if isinstance(req, basestring):
            req = pkg_resources.Requirement.parse(req)
        self.req = req
        self.comes_from = comes_from
        self.source_dir = source_dir
        self.editable = editable
        self.url = url
        # Cached by egg_info_path() once the .egg-info dir is located
        self._egg_info_path = None
        # This holds the pkg_resources.Distribution object if this requirement
        # is already available:
        self.satisfied_by = None
        # This hold the pkg_resources.Distribution object if this requirement
        # conflicts with another installed distribution:
        self.conflicts_with = None
        # Temporary build dir used while the requirement has no name yet
        # (see build_location/correct_build_location)
        self._temp_build_dir = None
        # Cached result of the is_bundle property
        self._is_bundle = None
        # True if the editable should be updated:
        self.update = update
        # Set to True after successful installation
        self.install_succeeded = None
        # UninstallPathSet of uninstalled distribution (for possible rollback)
        self.uninstalled = None
+
+    @classmethod
+    def from_editable(cls, editable_req, comes_from=None, default_vcs=None):
+        name, url = parse_editable(editable_req, default_vcs)
+        if url.startswith('file:'):
+            source_dir = url_to_filename(url)
+        else:
+            source_dir = None
+        return cls(name, comes_from, source_dir=source_dir, editable=True, url=url)
+
+    @classmethod
+    def from_line(cls, name, comes_from=None):
+        """Creates an InstallRequirement from a name, which might be a
+        requirement, filename, or URL.
+        """
+        url = None
+        name = name.strip()
+        req = name
+        if is_url(name):
+            url = name
+            ## FIXME: I think getting the requirement here is a bad idea:
+            #req = get_requirement_from_url(url)
+            req = None
+        elif is_filename(name):
+            if not os.path.exists(name):
+                logger.warn('Requirement %r looks like a filename, but the file does not exist'
+                            % name)
+            url = filename_to_url(name)
+            #req = get_requirement_from_url(url)
+            req = None
+        return cls(req, comes_from, url=url)
+
+    def __str__(self):
+        if self.req:
+            s = str(self.req)
+            if self.url:
+                s += ' from %s' % self.url
+        else:
+            s = self.url
+        if self.satisfied_by is not None:
+            s += ' in %s' % display_path(self.satisfied_by.location)
+        if self.comes_from:
+            if isinstance(self.comes_from, basestring):
+                comes_from = self.comes_from
+            else:
+                comes_from = self.comes_from.from_path()
+            if comes_from:
+                s += ' (from %s)' % comes_from
+        return s
+
+    def from_path(self):
+        if self.req is None:
+            return None
+        s = str(self.req)
+        if self.comes_from:
+            if isinstance(self.comes_from, basestring):
+                comes_from = self.comes_from
+            else:
+                comes_from = self.comes_from.from_path()
+            if comes_from:
+                s += '->' + comes_from
+        return s
+
+    def build_location(self, build_dir, unpack=True):
+        if self._temp_build_dir is not None:
+            return self._temp_build_dir
+        if self.req is None:
+            self._temp_build_dir = tempfile.mkdtemp('-build', 'pip-')
+            self._ideal_build_dir = build_dir
+            return self._temp_build_dir
+        if self.editable:
+            name = self.name.lower()
+        else:
+            name = self.name
+        # FIXME: Is there a better place to create the build_dir? (hg and bzr need this)
+        if not os.path.exists(build_dir):
+            os.makedirs(build_dir)
+        return os.path.join(build_dir, name)
+
    def correct_build_location(self):
        """If the build location was a temporary directory, this will move it
        to a new more permanent location"""
        if self.source_dir is not None:
            return
        assert self.req is not None
        assert self._temp_build_dir
        old_location = self._temp_build_dir
        new_build_dir = self._ideal_build_dir
        del self._ideal_build_dir
        # Editable checkouts use a lower-cased directory name (mirrors
        # build_location)
        if self.editable:
            name = self.name.lower()
        else:
            name = self.name
        new_location = os.path.join(new_build_dir, name)
        if not os.path.exists(new_build_dir):
            logger.debug('Creating directory %s' % new_build_dir)
            os.makedirs(new_build_dir)
        if os.path.exists(new_location):
            raise InstallationError(
                'A package already exists in %s; please remove it to continue'
                % display_path(new_location))
        logger.debug('Moving package %s from %s to new location %s'
                     % (self, display_path(old_location), display_path(new_location)))
        shutil.move(old_location, new_location)
        self._temp_build_dir = new_location
        self.source_dir = new_location
        # The cached egg-info path pointed into the old location
        self._egg_info_path = None
+
+    @property
+    def name(self):
+        if self.req is None:
+            return None
+        return self.req.project_name
+
+    @property
+    def url_name(self):
+        if self.req is None:
+            return None
+        return urllib.quote(self.req.unsafe_name)
+
    @property
    def setup_py(self):
        # Path of this requirement's setup.py inside its source tree.
        return os.path.join(self.source_dir, 'setup.py')
+
    def run_egg_info(self, force_root_egg_info=False):
        """Run 'setup.py egg_info' for this requirement to generate its
        metadata.  For non-editable requirements the .egg-info files go
        into a 'pip-egg-info' subdirectory (unless force_root_egg_info).
        Afterwards, an unnamed requirement learns its real name from
        PKG-INFO and is moved to its proper build location."""
        assert self.source_dir
        if self.name:
            logger.notify('Running setup.py egg_info for package %s' % self.name)
        else:
            logger.notify('Running setup.py egg_info for package from %s' % self.url)
        logger.indent += 2
        try:
            # Substitute the placeholders in the _run_setup_py template
            script = self._run_setup_py
            script = script.replace('__SETUP_PY__', repr(self.setup_py))
            script = script.replace('__PKG_NAME__', repr(self.name))
            # We can't put the .egg-info files at the root, because then the source code will be mistaken
            # for an installed egg, causing problems
            if self.editable or force_root_egg_info:
                egg_base_option = []
            else:
                egg_info_dir = os.path.join(self.source_dir, 'pip-egg-info')
                if not os.path.exists(egg_info_dir):
                    os.makedirs(egg_info_dir)
                egg_base_option = ['--egg-base', 'pip-egg-info']
            call_subprocess(
                [sys.executable, '-c', script, 'egg_info'] + egg_base_option,
                cwd=self.source_dir, filter_stdout=self._filter_install, show_stdout=False,
                command_level=Logger.VERBOSE_DEBUG,
                command_desc='python setup.py egg_info')
        finally:
            logger.indent -= 2
        if not self.req:
            # Metadata now exists, so the project's real name is known
            self.req = pkg_resources.Requirement.parse(self.pkg_info()['Name'])
            self.correct_build_location()
+
    ## FIXME: This is a lame hack, entirely for PasteScript which has
    ## a self-provided entry point that causes this awkwardness
    # Script template executed via `python -c` by run_egg_info(); the
    # __SETUP_PY__ and __PKG_NAME__ placeholders are textually replaced
    # there before execution.  It patches setuptools' egg_info command so
    # entry-point writers are loaded with require=False.
    _run_setup_py = """
__file__ = __SETUP_PY__
from setuptools.command import egg_info
def replacement_run(self):
    self.mkpath(self.egg_info)
    installer = self.distribution.fetch_build_egg
    for ep in egg_info.iter_entry_points('egg_info.writers'):
        # require=False is the change we're making:
        writer = ep.load(require=False)
        if writer:
            writer(self, ep.name, egg_info.os.path.join(self.egg_info,ep.name))
    self.find_sources()
egg_info.egg_info.run = replacement_run
execfile(__file__)
"""
+
+    def egg_info_data(self, filename):
+        if self.satisfied_by is not None:
+            if not self.satisfied_by.has_metadata(filename):
+                return None
+            return self.satisfied_by.get_metadata(filename)
+        assert self.source_dir
+        filename = self.egg_info_path(filename)
+        if not os.path.exists(filename):
+            return None
+        fp = open(filename, 'r')
+        data = fp.read()
+        fp.close()
+        return data
+
    def egg_info_path(self, filename):
        """Return the path of *filename* inside this requirement's
        .egg-info directory, locating (and caching) that directory on
        first use.  Asserts that exactly one candidate exists."""
        if self._egg_info_path is None:
            if self.editable:
                base = self.source_dir
            else:
                base = os.path.join(self.source_dir, 'pip-egg-info')
            filenames = os.listdir(base)
            if self.editable:
                # For editables, walk the whole tree (skipping VCS
                # metadata dirs) looking for *.egg-info directories.
                # NOTE(review): this overwrites the os.listdir() result
                # above, which ends up used only for non-editables.
                filenames = []
                for root, dirs, files in os.walk(base):
                    for dir in vcs.dirnames:
                        if dir in dirs:
                            dirs.remove(dir)
                    filenames.extend([os.path.join(root, dir)
                                     for dir in dirs])
                filenames = [f for f in filenames if f.endswith('.egg-info')]
            assert filenames, "No files/directories in %s (from %s)" % (base, filename)
            assert len(filenames) == 1, "Unexpected files/directories in %s: %s" % (base, ' '.join(filenames))
            self._egg_info_path = os.path.join(base, filenames[0])
        return os.path.join(self._egg_info_path, filename)
+
+    def egg_info_lines(self, filename):
+        data = self.egg_info_data(filename)
+        if not data:
+            return []
+        result = []
+        for line in data.splitlines():
+            line = line.strip()
+            if not line or line.startswith('#'):
+                continue
+            result.append(line)
+        return result
+
    def pkg_info(self):
        """Parse this requirement's PKG-INFO metadata and return the
        parsed message object (FeedParser.close() result -- presumably
        an email Message; confirm the FeedParser import at file top).
        Missing metadata yields an empty parse plus a warning."""
        p = FeedParser()
        data = self.egg_info_data('PKG-INFO')
        if not data:
            logger.warn('No PKG-INFO file found in %s' % display_path(self.egg_info_path('PKG-INFO')))
        p.feed(data or '')
        return p.close()
+
    @property
    def dependency_links(self):
        # Extra download/index URLs declared in the egg-info metadata.
        return self.egg_info_lines('dependency_links.txt')
+
+    _requirements_section_re = re.compile(r'\[(.*?)\]')
+
+    def requirements(self, extras=()):
+        in_extra = None
+        for line in self.egg_info_lines('requires.txt'):
+            match = self._requirements_section_re.match(line)
+            if match:
+                in_extra = match.group(1)
+                continue
+            if in_extra and in_extra not in extras:
+                # Skip requirement for an extra we aren't requiring
+                continue
+            yield line
+
+    @property
+    def absolute_versions(self):
+        for qualifier, version in self.req.specs:
+            if qualifier == '==':
+                yield version
+
    @property
    def installed_version(self):
        # Version string as recorded in the generated PKG-INFO metadata.
        return self.pkg_info()['version']
+
    def assert_source_matches_version(self):
        """Check that the unpacked source's version satisfies this
        requirement; raises InstallationError when it doesn't."""
        assert self.source_dir
        if self.comes_from is None:
            # We don't check the versions of things explicitly installed.
            # This makes, e.g., "pip Package==dev" possible
            return
        version = self.installed_version
        if version not in self.req:
            logger.fatal(
                'Source in %s has the version %s, which does not match the requirement %s'
                % (display_path(self.source_dir), version, self))
            raise InstallationError(
                'Source in %s has version %s that conflicts with %s'
                % (display_path(self.source_dir), version, self))
        else:
            logger.debug('Source in %s has version %s, which satisfies requirement %s'
                         % (display_path(self.source_dir), version, self))
+
    def update_editable(self, obtain=True):
        """Refresh an editable checkout from its VCS URL: obtain
        (checkout/update) when *obtain* is true, otherwise export.
        file: URLs and requirements with self.update false are left
        untouched."""
        if not self.url:
            logger.info("Cannot update repository at %s; repository location is unknown" % self.source_dir)
            return
        assert self.editable
        assert self.source_dir
        if self.url.startswith('file:'):
            # Static paths don't get updated
            return
        # Editable URLs look like "<vcs>+<real url>", e.g. "svn+http://..."
        assert '+' in self.url, "bad url: %r" % self.url
        if not self.update:
            return
        vc_type, url = self.url.split('+', 1)
        backend = vcs.get_backend(vc_type)
        if backend:
            vcs_backend = backend(self.url)
            if obtain:
                vcs_backend.obtain(self.source_dir)
            else:
                vcs_backend.export(self.source_dir)
        else:
            assert 0, (
                'Unexpected version control type (in %s): %s'
                % (self.url, vc_type))
+
    def uninstall(self, auto_confirm=False):
        """
        Uninstall the distribution currently satisfying this requirement.

        Prompts before removing or modifying files unless
        ``auto_confirm`` is True.

        Refuses to delete or modify files outside of ``sys.prefix`` -
        thus uninstallation within a virtual environment can only
        modify that virtual environment, even if the virtualenv is
        linked to global site-packages.

        """
        if not self.check_if_exists():
            raise UninstallationError("Cannot uninstall requirement %s, not installed" % (self.name,))
        dist = self.satisfied_by or self.conflicts_with
        paths_to_remove = UninstallPathSet(dist, sys.prefix)

        pip_egg_info_path = os.path.join(dist.location,
                                         dist.egg_name()) + '.egg-info'
        easy_install_egg = dist.egg_name() + '.egg'
        # This won't find a globally-installed develop egg if
        # we're in a virtualenv.
        # (There doesn't seem to be any metadata in the
        # Distribution object for a develop egg that points back
        # to its .egg-link and easy-install.pth files).  That's
        # OK, because we restrict ourselves to making changes
        # within sys.prefix anyway.
        develop_egg_link = os.path.join(site_packages,
                                        dist.project_name) + '.egg-link'
        # Three install layouts are recognized below: pip install,
        # easy_install egg, and develop (editable) egg.
        if os.path.exists(pip_egg_info_path):
            # package installed by pip
            paths_to_remove.add(pip_egg_info_path)
            if dist.has_metadata('installed-files.txt'):
                # Entries are joined relative to the egg-info directory
                for installed_file in dist.get_metadata('installed-files.txt').splitlines():
                    path = os.path.normpath(os.path.join(pip_egg_info_path, installed_file))
                    if os.path.exists(path):
                        paths_to_remove.add(path)
            if dist.has_metadata('top_level.txt'):
                for top_level_pkg in [p for p
                                      in dist.get_metadata('top_level.txt').splitlines()
                                      if p]:
                    path = os.path.join(dist.location, top_level_pkg)
                    if os.path.exists(path):
                        paths_to_remove.add(path)
                    elif os.path.exists(path + '.py'):
                        # Top-level single module rather than a package
                        paths_to_remove.add(path + '.py')
                        if os.path.exists(path + '.pyc'):
                            paths_to_remove.add(path + '.pyc')

        elif dist.location.endswith(easy_install_egg):
            # package installed by easy_install
            paths_to_remove.add(dist.location)
            easy_install_pth = os.path.join(os.path.dirname(dist.location),
                                            'easy-install.pth')
            paths_to_remove.add_pth(easy_install_pth, './' + easy_install_egg)

        elif os.path.isfile(develop_egg_link):
            # develop egg
            fh = open(develop_egg_link, 'r')
            link_pointer = os.path.normcase(fh.readline().strip())
            fh.close()
            assert (link_pointer == dist.location), 'Egg-link %s does not match installed location of %s (at %s)' % (link_pointer, self.name, dist.location)
            paths_to_remove.add(develop_egg_link)
            easy_install_pth = os.path.join(os.path.dirname(develop_egg_link),
                                            'easy-install.pth')
            paths_to_remove.add_pth(easy_install_pth, dist.location)
            # fix location (so we can uninstall links to sources outside venv)
            paths_to_remove.location = develop_egg_link

        # find distutils scripts= scripts
        if dist.has_metadata('scripts') and dist.metadata_isdir('scripts'):
            for script in dist.metadata_listdir('scripts'):
                paths_to_remove.add(os.path.join(bin_py, script))
                if sys.platform == 'win32':
                    paths_to_remove.add(os.path.join(bin_py, script) + '.bat')

        # find console_scripts
        if dist.has_metadata('entry_points.txt'):
            config = ConfigParser.SafeConfigParser()
            config.readfp(FakeFile(dist.get_metadata_lines('entry_points.txt')))
            if config.has_section('console_scripts'):
                for name, value in config.items('console_scripts'):
                    paths_to_remove.add(os.path.join(bin_py, name))
                    if sys.platform == 'win32':
                        # setuptools writes an .exe launcher plus a
                        # -script.py wrapper on Windows
                        paths_to_remove.add(os.path.join(bin_py, name) + '.exe')
                        paths_to_remove.add(os.path.join(bin_py, name) + '-script.py')

        paths_to_remove.remove(auto_confirm)
        self.uninstalled = paths_to_remove
+
+    def rollback_uninstall(self):
+        if self.uninstalled:
+            self.uninstalled.rollback()
+        else:
+            logger.error("Can't rollback %s, nothing uninstalled."
+                         % (self.project_name,))
+
    def archive(self, build_dir):
        """Create a <name>-<version>.zip archive of the source tree in
        *build_dir*, prompting (i)gnore/(w)ipe/(b)ackup when the target
        file already exists."""
        assert self.source_dir
        create_archive = True
        archive_name = '%s-%s.zip' % (self.name, self.installed_version)
        archive_path = os.path.join(build_dir, archive_name)
        if os.path.exists(archive_path):
            response = ask('The file %s exists. (i)gnore, (w)ipe, (b)ackup '
                           % display_path(archive_path), ('i', 'w', 'b'))
            if response == 'i':
                create_archive = False
            elif response == 'w':
                logger.warn('Deleting %s' % display_path(archive_path))
                os.remove(archive_path)
            elif response == 'b':
                dest_file = backup_dir(archive_path)
                logger.warn('Backing up %s to %s'
                            % (display_path(archive_path), display_path(dest_file)))
                shutil.move(archive_path, dest_file)
        if create_archive:
            zip = zipfile.ZipFile(archive_path, 'w', zipfile.ZIP_DEFLATED)
            dir = os.path.normcase(os.path.abspath(self.source_dir))
            for dirpath, dirnames, filenames in os.walk(dir):
                if 'pip-egg-info' in dirnames:
                    dirnames.remove('pip-egg-info')
                for dirname in dirnames:
                    dirname = os.path.join(dirpath, dirname)
                    name = self._clean_zip_name(dirname, dir)
                    zipdir = zipfile.ZipInfo(self.name + '/' + name + '/')
                    # Set unix 0755 permission bits in the zip entry's
                    # external attributes (Python 2 octal/long literals)
                    zipdir.external_attr = 0755 << 16L
                    zip.writestr(zipdir, '')
                for filename in filenames:
                    if filename == 'pip-delete-this-directory.txt':
                        continue
                    filename = os.path.join(dirpath, filename)
                    name = self._clean_zip_name(filename, dir)
                    zip.write(filename, self.name + '/' + name)
            zip.close()
            # NOTE(review): this decrement has no matching increment in
            # this method -- looks unbalanced; confirm against callers.
            logger.indent -= 2
            logger.notify('Saved %s' % display_path(archive_path))
+
+    def _clean_zip_name(self, name, prefix):
+        assert name.startswith(prefix+'/'), (
+            "name %r doesn't start with prefix %r" % (name, prefix))
+        name = name[len(prefix)+1:]
+        name = name.replace(os.path.sep, '/')
+        return name
+
+    def install(self, install_options):
+        if self.editable:
+            self.install_editable()
+            return
+        temp_location = tempfile.mkdtemp('-record', 'pip-')
+        record_filename = os.path.join(temp_location, 'install-record.txt')
+        ## FIXME: I'm not sure if this is a reasonable location; probably not
+        ## but we can't put it in the default location, as that is a virtualenv symlink that isn't writable
+        header_dir = os.path.join(os.path.dirname(os.path.dirname(self.source_dir)), 'lib', 'include')
+        logger.notify('Running setup.py install for %s' % self.name)
+        logger.indent += 2
+        try:
+            call_subprocess(
+                [sys.executable, '-c',
+                 "import setuptools; __file__=%r; execfile(%r)" % (self.setup_py, self.setup_py),
+                 'install', '--single-version-externally-managed', '--record', record_filename,
+                 '--install-headers', header_dir] + install_options,
+                cwd=self.source_dir, filter_stdout=self._filter_install, show_stdout=False)
+        finally:
+            logger.indent -= 2
+        self.install_succeeded = True
+        f = open(record_filename)
+        for line in f:
+            line = line.strip()
+            if line.endswith('.egg-info'):
+                egg_info_dir = line
+                break
+        else:
+            logger.warn('Could not find .egg-info directory in install record for %s' % self)
+            ## FIXME: put the record somewhere
+            return
+        f.close()
+        new_lines = []
+        f = open(record_filename)
+        for line in f:
+            filename = line.strip()
+            if os.path.isdir(filename):
+                filename += os.path.sep
+            new_lines.append(make_path_relative(filename, egg_info_dir))
+        f.close()
+        f = open(os.path.join(egg_info_dir, 'installed-files.txt'), 'w')
+        f.write('\n'.join(new_lines)+'\n')
+        f.close()
+
    def remove_temporary_source(self):
        """Remove the source files from this requirement, if they are marked
        for deletion"""
        if self.is_bundle or os.path.exists(self.delete_marker_filename):
            logger.info('Removing source in %s' % self.source_dir)
            if self.source_dir:
                # NOTE(review): with ignore_errors=True, shutil.rmtree
                # never calls the onerror handler -- the handler passed
                # here is effectively dead; confirm which was intended.
                shutil.rmtree(self.source_dir, ignore_errors=True, onerror=rmtree_errorhandler)
            self.source_dir = None
            if self._temp_build_dir and os.path.exists(self._temp_build_dir):
                shutil.rmtree(self._temp_build_dir, ignore_errors=True, onerror=rmtree_errorhandler)
            self._temp_build_dir = None
+
+    def install_editable(self):
+        logger.notify('Running setup.py develop for %s' % self.name)
+        logger.indent += 2
+        try:
+            ## FIXME: should we do --install-headers here too?
+            call_subprocess(
+                [sys.executable, '-c',
+                 "import setuptools; __file__=%r; execfile(%r)" % (self.setup_py, self.setup_py),
+                 'develop', '--no-deps'], cwd=self.source_dir, filter_stdout=self._filter_install,
+                show_stdout=False)
+        finally:
+            logger.indent -= 2
+        self.install_succeeded = True
+
+    def _filter_install(self, line):
+        level = Logger.NOTIFY
+        for regex in [r'^running .*', r'^writing .*', '^creating .*', '^[Cc]opying .*',
+                      r'^reading .*', r"^removing .*\.egg-info' \(and everything under it\)$",
+                      r'^byte-compiling ',
+                      # Not sure what this warning is, but it seems harmless:
+                      r"^warning: manifest_maker: standard file '-c' not found$"]:
+            if re.search(regex, line.strip()):
+                level = Logger.INFO
+                break
+        return (level, line)
+
    def check_if_exists(self):
        """Find an installed distribution that satisfies or conflicts
        with this requirement, and set self.satisfied_by or
        self.conflicts_with appropriately."""
        if self.req is None:
            return False
        try:
            self.satisfied_by = pkg_resources.get_distribution(self.req)
        except pkg_resources.DistributionNotFound:
            return False
        except pkg_resources.VersionConflict:
            # Same project is installed, but at a version outside our
            # spec -- record it so uninstall/upgrade can deal with it
            self.conflicts_with = pkg_resources.get_distribution(self.req.project_name)
        return True
+
+    @property
+    def is_bundle(self):
+        if self._is_bundle is not None:
+            return self._is_bundle
+        base = self._temp_build_dir
+        if not base:
+            ## FIXME: this doesn't seem right:
+            return False
+        self._is_bundle = (os.path.exists(os.path.join(base, 'pip-manifest.txt'))
+                           or os.path.exists(os.path.join(base, 'pyinstall-manifest.txt')))
+        return self._is_bundle
+
    def bundle_requirements(self):
        """Yield an InstallRequirement for each package contained in
        this (already unpacked) bundle: editables from the recorded src/
        dirs, regular builds from the build/ dirs (both recorded by
        move_bundle_files)."""
        for dest_dir in self._bundle_editable_dirs:
            package = os.path.basename(dest_dir)
            ## FIXME: svnism:
            for vcs_backend in vcs.backends:
                url = rev = None
                vcs_bundle_file = os.path.join(
                    dest_dir, vcs_backend.bundle_file)
                if os.path.exists(vcs_bundle_file):
                    vc_type = vcs_backend.name
                    fp = open(vcs_bundle_file)
                    content = fp.read()
                    fp.close()
                    url, rev = vcs_backend().parse_vcs_bundle_file(content)
                    break
            if url:
                # Reconstruct the editable URL, e.g. "svn+http://...@123"
                url = '%s+%s@%s' % (vc_type, url, rev)
            else:
                url = None
            yield InstallRequirement(
                package, self, editable=True, url=url,
                update=False, source_dir=dest_dir)
        for dest_dir in self._bundle_build_dirs:
            package = os.path.basename(dest_dir)
            yield InstallRequirement(
                package, self,
                source_dir=dest_dir)
+
    def move_bundle_files(self, dest_build_dir, dest_src_dir):
        """Move the src/ (editable) and build/ contents of an unpacked
        bundle out of the temp dir into the real src/build dirs,
        recording the destination dirs for bundle_requirements()."""
        base = self._temp_build_dir
        assert base
        src_dir = os.path.join(base, 'src')
        build_dir = os.path.join(base, 'build')
        bundle_build_dirs = []
        bundle_editable_dirs = []
        for source_dir, dest_dir, dir_collection in [
            (src_dir, dest_src_dir, bundle_editable_dirs),
            (build_dir, dest_build_dir, bundle_build_dirs)]:
            if os.path.exists(source_dir):
                for dirname in os.listdir(source_dir):
                    dest = os.path.join(dest_dir, dirname)
                    # The destination is recorded even when the move is
                    # skipped below, so it is still processed later
                    dir_collection.append(dest)
                    if os.path.exists(dest):
                        logger.warn('The directory %s (containing package %s) already exists; cannot move source from bundle %s'
                                    % (dest, dirname, self))
                        continue
                    if not os.path.exists(dest_dir):
                        logger.info('Creating directory %s' % dest_dir)
                        os.makedirs(dest_dir)
                    shutil.move(os.path.join(source_dir, dirname), dest)
                if not os.listdir(source_dir):
                    os.rmdir(source_dir)
        self._temp_build_dir = None
        self._bundle_build_dirs = bundle_build_dirs
        self._bundle_editable_dirs = bundle_editable_dirs
+
    @property
    def delete_marker_filename(self):
        # Path of the sentinel file marking this source dir as safe to
        # delete after a successful install (see remove_temporary_source).
        assert self.source_dir
        return os.path.join(self.source_dir, 'pip-delete-this-directory.txt')
+
# Text for the pip-delete-this-directory.txt sentinel (see
# delete_marker_filename); its presence marks a source dir as safe to
# delete after a successful install.
DELETE_MARKER_MESSAGE = '''\
This file is placed here by pip to indicate the source was put
here by pip.

Once this package is successfully installed this source code will be
deleted (unless you remove this file).
'''
+
+class RequirementSet(object):
+
    def __init__(self, build_dir, src_dir, download_dir, download_cache=None,
                 upgrade=False, ignore_installed=False,
                 ignore_dependencies=False):
        """Collection of requirements to download/install, along with
        the directories and policy flags shared by the whole run."""
        self.build_dir = build_dir
        self.src_dir = src_dir
        self.download_dir = download_dir
        self.download_cache = download_cache
        self.upgrade = upgrade
        self.ignore_installed = ignore_installed
        # Mapping of real_name: InstallRequirement
        self.requirements = {}
        # Mapping of alias: real_name
        self.requirement_aliases = {}
        # Requirements whose name isn't known yet (URL/file based)
        self.unnamed_requirements = []
        self.ignore_dependencies = ignore_dependencies
        self.successfully_downloaded = []
        self.successfully_installed = []
+
+    def __str__(self):
+        reqs = [req for req in self.requirements.values()
+                if not req.comes_from]
+        reqs.sort(key=lambda req: req.name.lower())
+        return ' '.join([str(req.req) for req in reqs])
+
+    def add_requirement(self, install_req):
+        name = install_req.name
+        if not name:
+            self.unnamed_requirements.append(install_req)
+        else:
+            if self.has_requirement(name):
+                raise InstallationError(
+                    'Double requirement given: %s (aready in %s, name=%r)'
+                    % (install_req, self.get_requirement(name), name))
+            self.requirements[name] = install_req
+            ## FIXME: what about other normalizations?  E.g., _ vs. -?
+            if name.lower() != name:
+                self.requirement_aliases[name.lower()] = name
+
+    def has_requirement(self, project_name):
+        for name in project_name, project_name.lower():
+            if name in self.requirements or name in self.requirement_aliases:
+                return True
+        return False
+
+    @property
+    def is_download(self):
+        if self.download_dir:
+            self.download_dir = os.path.expanduser(self.download_dir)
+            if os.path.exists(self.download_dir):
+                return True
+            else:
+                logger.fatal('Could not find download directory')
+                raise InstallationError(
+                    "Could not find or access download directory '%s'"
+                    % display_path(self.download_dir))
+        return False
+
+    def get_requirement(self, project_name):
+        for name in project_name, project_name.lower():
+            if name in self.requirements:
+                return self.requirements[name]
+            if name in self.requirement_aliases:
+                return self.requirements[self.requirement_aliases[name]]
+        raise KeyError("No project with the name %r" % project_name)
+
+    def uninstall(self, auto_confirm=False):
+        for req in self.requirements.values():
+            req.uninstall(auto_confirm=auto_confirm)
+
+    def install_files(self, finder, force_root_egg_info=False):
+        unnamed = list(self.unnamed_requirements)
+        reqs = self.requirements.values()
+        while reqs or unnamed:
+            if unnamed:
+                req_to_install = unnamed.pop(0)
+            else:
+                req_to_install = reqs.pop(0)
+            install = True
+            if not self.ignore_installed and not req_to_install.editable:
+                req_to_install.check_if_exists()
+                if req_to_install.satisfied_by:
+                    if self.upgrade:
+                        req_to_install.conflicts_with = req_to_install.satisfied_by
+                        req_to_install.satisfied_by = None
+                    else:
+                        install = False
+                if req_to_install.satisfied_by:
+                    logger.notify('Requirement already satisfied '
+                                  '(use --upgrade to upgrade): %s'
+                                  % req_to_install)
+            if req_to_install.editable:
+                logger.notify('Obtaining %s' % req_to_install)
+            elif install:
+                if req_to_install.url and req_to_install.url.lower().startswith('file:'):
+                    logger.notify('Unpacking %s' % display_path(url_to_filename(req_to_install.url)))
+                else:
+                    logger.notify('Downloading/unpacking %s' % req_to_install)
+            logger.indent += 2
+            is_bundle = False
+            try:
+                if req_to_install.editable:
+                    if req_to_install.source_dir is None:
+                        location = req_to_install.build_location(self.src_dir)
+                        req_to_install.source_dir = location
+                    else:
+                        location = req_to_install.source_dir
+                    if not os.path.exists(self.build_dir):
+                        os.makedirs(self.build_dir)
+                    req_to_install.update_editable(not self.is_download)
+                    if self.is_download:
+                        req_to_install.run_egg_info()
+                        req_to_install.archive(self.download_dir)
+                    else:
+                        req_to_install.run_egg_info()
+                elif install:
+                    location = req_to_install.build_location(self.build_dir, not self.is_download)
+                    ## FIXME: is the existance of the checkout good enough to use it?  I don't think so.
+                    unpack = True
+                    if not os.path.exists(os.path.join(location, 'setup.py')):
+                        ## FIXME: this won't upgrade when there's an existing package unpacked in `location`
+                        if req_to_install.url is None:
+                            url = finder.find_requirement(req_to_install, upgrade=self.upgrade)
+                        else:
+                            ## FIXME: should req_to_install.url already be a link?
+                            url = Link(req_to_install.url)
+                            assert url
+                        if url:
+                            try:
+                                self.unpack_url(url, location, self.is_download)
+                            except urllib2.HTTPError, e:
+                                logger.fatal('Could not install requirement %s because of error %s'
+                                             % (req_to_install, e))
+                                raise InstallationError(
+                                    'Could not install requirement %s because of HTTP error %s for URL %s'
+                                    % (req_to_install, e, url))
+                        else:
+                            unpack = False
+                    if unpack:
+                        is_bundle = req_to_install.is_bundle
+                        url = None
+                        if is_bundle:
+                            req_to_install.move_bundle_files(self.build_dir, self.src_dir)
+                            for subreq in req_to_install.bundle_requirements():
+                                reqs.append(subreq)
+                                self.add_requirement(subreq)
+                        elif self.is_download:
+                            req_to_install.source_dir = location
+                            if url and url.scheme in vcs.all_schemes:
+                                req_to_install.run_egg_info()
+                                req_to_install.archive(self.download_dir)
+                        else:
+                            req_to_install.source_dir = location
+                            req_to_install.run_egg_info()
+                            if force_root_egg_info:
+                                # We need to run this to make sure that the .egg-info/
+                                # directory is created for packing in the bundle
+                                req_to_install.run_egg_info(force_root_egg_info=True)
+                            req_to_install.assert_source_matches_version()
+                            f = open(req_to_install.delete_marker_filename, 'w')
+                            f.write(DELETE_MARKER_MESSAGE)
+                            f.close()
+                if not is_bundle and not self.is_download:
+                    ## FIXME: shouldn't be globally added:
+                    finder.add_dependency_links(req_to_install.dependency_links)
+                    ## FIXME: add extras in here:
+                    if not self.ignore_dependencies:
+                        for req in req_to_install.requirements():
+                            try:
+                                name = pkg_resources.Requirement.parse(req).project_name
+                            except ValueError, e:
+                                ## FIXME: proper warning
+                                logger.error('Invalid requirement: %r (%s) in requirement %s' % (req, e, req_to_install))
+                                continue
+                            if self.has_requirement(name):
+                                ## FIXME: check for conflict
+                                continue
+                            subreq = InstallRequirement(req, req_to_install)
+                            reqs.append(subreq)
+                            self.add_requirement(subreq)
+                    if req_to_install.name not in self.requirements:
+                        self.requirements[req_to_install.name] = req_to_install
+                else:
+                    req_to_install.remove_temporary_source()
+                if install:
+                    self.successfully_downloaded.append(req_to_install)
+            finally:
+                logger.indent -= 2
+
+    def unpack_url(self, link, location, only_download=False):
+        if only_download:
+            location = self.download_dir
+        for backend in vcs.backends:
+            if link.scheme in backend.schemes:
+                vcs_backend = backend(link.url)
+                if only_download:
+                    vcs_backend.export(location)
+                else:
+                    vcs_backend.unpack(location)
+                return
+        dir = tempfile.mkdtemp()
+        if link.url.lower().startswith('file:'):
+            source = url_to_filename(link.url)
+            content_type = mimetypes.guess_type(source)[0]
+            self.unpack_file(source, location, content_type, link)
+            return
+        md5_hash = link.md5_hash
+        target_url = link.url.split('#', 1)[0]
+        target_file = None
+        if self.download_cache:
+            if not os.path.isdir(self.download_cache):
+                logger.indent -= 2
+                logger.notify('Creating supposed download cache at %s' % self.download_cache)
+                logger.indent += 2
+                os.makedirs(self.download_cache)
+            target_file = os.path.join(self.download_cache,
+                                       urllib.quote(target_url, ''))
+        if (target_file and os.path.exists(target_file)
+            and os.path.exists(target_file+'.content-type')):
+            fp = open(target_file+'.content-type')
+            content_type = fp.read().strip()
+            fp.close()
+            if md5_hash:
+                download_hash = md5()
+                fp = open(target_file, 'rb')
+                while 1:
+                    chunk = fp.read(4096)
+                    if not chunk:
+                        break
+                    download_hash.update(chunk)
+                fp.close()
+            temp_location = target_file
+            logger.notify('Using download cache from %s' % target_file)
+        else:
+            try:
+                resp = urllib2.urlopen(target_url)
+            except urllib2.HTTPError, e:
+                logger.fatal("HTTP error %s while getting %s" % (e.code, link))
+                raise
+            except IOError, e:
+                # Typically an FTP error
+                logger.fatal("Error %s while getting %s" % (e, link))
+                raise
+            content_type = resp.info()['content-type']
+            filename = link.filename
+            ext = splitext(filename)[1]
+            if not ext:
+                ext = mimetypes.guess_extension(content_type)
+                if ext:
+                    filename += ext
+            if not ext and link.url != resp.geturl():
+                ext = os.path.splitext(resp.geturl())[1]
+                if ext:
+                    filename += ext
+            temp_location = os.path.join(dir, filename)
+            fp = open(temp_location, 'wb')
+            if md5_hash:
+                download_hash = md5()
+            try:
+                total_length = int(resp.info()['content-length'])
+            except (ValueError, KeyError):
+                total_length = 0
+            downloaded = 0
+            show_progress = total_length > 40*1000 or not total_length
+            show_url = link.show_url
+            try:
+                if show_progress:
+                    ## FIXME: the URL can get really long in this message:
+                    if total_length:
+                        logger.start_progress('Downloading %s (%s): ' % (show_url, format_size(total_length)))
+                    else:
+                        logger.start_progress('Downloading %s (unknown size): ' % show_url)
+                else:
+                    logger.notify('Downloading %s' % show_url)
+                logger.debug('Downloading from URL %s' % link)
+                while 1:
+                    chunk = resp.read(4096)
+                    if not chunk:
+                        break
+                    downloaded += len(chunk)
+                    if show_progress:
+                        if not total_length:
+                            logger.show_progress('%s' % format_size(downloaded))
+                        else:
+                            logger.show_progress('%3i%%  %s' % (100*downloaded/total_length, format_size(downloaded)))
+                    if md5_hash:
+                        download_hash.update(chunk)
+                    fp.write(chunk)
+                fp.close()
+            finally:
+                if show_progress:
+                    logger.end_progress('%s downloaded' % format_size(downloaded))
+        if md5_hash:
+            download_hash = download_hash.hexdigest()
+            if download_hash != md5_hash:
+                logger.fatal("MD5 hash of the package %s (%s) doesn't match the expected hash %s!"
+                             % (link, download_hash, md5_hash))
+                raise InstallationError('Bad MD5 hash for package %s' % link)
+        if only_download:
+            self.copy_file(temp_location, location, content_type, link)
+        else:
+            self.unpack_file(temp_location, location, content_type, link)
+        if target_file and target_file != temp_location:
+            logger.notify('Storing download in cache at %s' % display_path(target_file))
+            shutil.copyfile(temp_location, target_file)
+            fp = open(target_file+'.content-type', 'w')
+            fp.write(content_type)
+            fp.close()
+            os.unlink(temp_location)
+        if target_file is None:
+            os.unlink(temp_location)
+
+    def copy_file(self, filename, location, content_type, link):
+        copy = True
+        download_location = os.path.join(location, link.filename)
+        if os.path.exists(download_location):
+            response = ask('The file %s exists. (i)gnore, (w)ipe, (b)ackup '
+                           % display_path(download_location), ('i', 'w', 'b'))
+            if response == 'i':
+                copy = False
+            elif response == 'w':
+                logger.warn('Deleting %s' % display_path(download_location))
+                os.remove(download_location)
+            elif response == 'b':
+                dest_file = backup_dir(download_location)
+                logger.warn('Backing up %s to %s'
+                            % (display_path(download_location), display_path(dest_file)))
+                shutil.move(download_location, dest_file)
+        if copy:
+            shutil.copy(filename, download_location)
+            logger.indent -= 2
+            logger.notify('Saved %s' % display_path(download_location))
+
+    def unpack_file(self, filename, location, content_type, link):
+        if (content_type == 'application/zip'
+            or filename.endswith('.zip')
+            or filename.endswith('.pybundle')
+            or zipfile.is_zipfile(filename)):
+            self.unzip_file(filename, location, flatten=not filename.endswith('.pybundle'))
+        elif (content_type == 'application/x-gzip'
+              or tarfile.is_tarfile(filename)
+              or splitext(filename)[1].lower() in ('.tar', '.tar.gz', '.tar.bz2', '.tgz', '.tbz')):
+            self.untar_file(filename, location)
+        elif (content_type and content_type.startswith('text/html')
+              and is_svn_page(file_contents(filename))):
+            # We don't really care about this
+            Subversion('svn+' + link.url).unpack(location)
+        else:
+            ## FIXME: handle?
+            ## FIXME: magic signatures?
+            logger.fatal('Cannot unpack file %s (downloaded from %s, content-type: %s); cannot detect archive format'
+                         % (filename, location, content_type))
+            raise InstallationError('Cannot determine archive format of %s' % location)
+
+    def unzip_file(self, filename, location, flatten=True):
+        """Unzip the file (zip file located at filename) to the destination
+        location"""
+        if not os.path.exists(location):
+            os.makedirs(location)
+        zipfp = open(filename, 'rb')
+        try:
+            zip = zipfile.ZipFile(zipfp)
+            leading = has_leading_dir(zip.namelist()) and flatten
+            for name in zip.namelist():
+                data = zip.read(name)
+                fn = name
+                if leading:
+                    fn = split_leading_dir(name)[1]
+                fn = os.path.join(location, fn)
+                dir = os.path.dirname(fn)
+                if not os.path.exists(dir):
+                    os.makedirs(dir)
+                if fn.endswith('/') or fn.endswith('\\'):
+                    # A directory
+                    if not os.path.exists(fn):
+                        os.makedirs(fn)
+                else:
+                    fp = open(fn, 'wb')
+                    try:
+                        fp.write(data)
+                    finally:
+                        fp.close()
+        finally:
+            zipfp.close()
+
+    def untar_file(self, filename, location):
+        """Untar the file (tar file located at filename) to the destination location"""
+        if not os.path.exists(location):
+            os.makedirs(location)
+        if filename.lower().endswith('.gz') or filename.lower().endswith('.tgz'):
+            mode = 'r:gz'
+        elif filename.lower().endswith('.bz2') or filename.lower().endswith('.tbz'):
+            mode = 'r:bz2'
+        elif filename.lower().endswith('.tar'):
+            mode = 'r'
+        else:
+            logger.warn('Cannot determine compression type for file %s' % filename)
+            mode = 'r:*'
+        tar = tarfile.open(filename, mode)
+        try:
+            leading = has_leading_dir([member.name for member in tar.getmembers()])
+            for member in tar.getmembers():
+                fn = member.name
+                if leading:
+                    fn = split_leading_dir(fn)[1]
+                path = os.path.join(location, fn)
+                if member.isdir():
+                    if not os.path.exists(path):
+                        os.makedirs(path)
+                else:
+                    try:
+                        fp = tar.extractfile(member)
+                    except (KeyError, AttributeError), e:
+                        # Some corrupt tar files seem to produce this
+                        # (specifically bad symlinks)
+                        logger.warn(
+                            'In the tar file %s the member %s is invalid: %s'
+                            % (filename, member.name, e))
+                        continue
+                    if not os.path.exists(os.path.dirname(path)):
+                        os.makedirs(os.path.dirname(path))
+                    destfp = open(path, 'wb')
+                    try:
+                        shutil.copyfileobj(fp, destfp)
+                    finally:
+                        destfp.close()
+                    fp.close()
+        finally:
+            tar.close()
+
+    def install(self, install_options):
+        """Install everything in this set (after having downloaded and unpacked the packages)"""
+        to_install = sorted([r for r in self.requirements.values()
+                             if self.upgrade or not r.satisfied_by],
+                            key=lambda p: p.name.lower())
+        if to_install:
+            logger.notify('Installing collected packages: %s' % (', '.join([req.name for req in to_install])))
+        logger.indent += 2
+        try:
+            for requirement in to_install:
+                if requirement.conflicts_with:
+                    logger.notify('Found existing installation: %s'
+                                  % requirement.conflicts_with)
+                    logger.indent += 2
+                    try:
+                        requirement.uninstall(auto_confirm=True)
+                    finally:
+                        logger.indent -= 2
+                try:
+                    requirement.install(install_options)
+                except:
+                    # if install did not succeed, rollback previous uninstall
+                    if requirement.conflicts_with and not requirement.install_succeeded:
+                        requirement.rollback_uninstall()
+                    raise
+                requirement.remove_temporary_source()
+        finally:
+            logger.indent -= 2
+        self.successfully_installed = to_install
+
+    def create_bundle(self, bundle_filename):
+        ## FIXME: can't decide which is better; zip is easier to read
+        ## random files from, but tar.bz2 is smaller and not as lame a
+        ## format.
+
+        ## FIXME: this file should really include a manifest of the
+        ## packages, maybe some other metadata files.  It would make
+        ## it easier to detect as well.
+        zip = zipfile.ZipFile(bundle_filename, 'w', zipfile.ZIP_DEFLATED)
+        vcs_dirs = []
+        for dir, basename in (self.build_dir, 'build'), (self.src_dir, 'src'):
+            dir = os.path.normcase(os.path.abspath(dir))
+            for dirpath, dirnames, filenames in os.walk(dir):
+                for backend in vcs.backends:
+                    vcs_backend = backend()
+                    vcs_url = vcs_rev = None
+                    if vcs_backend.dirname in dirnames:
+                        for vcs_dir in vcs_dirs:
+                            if dirpath.startswith(vcs_dir):
+                                # vcs bundle file already in parent directory
+                                break
+                        else:
+                            vcs_url, vcs_rev = vcs_backend.get_info(
+                                os.path.join(dir, dirpath))
+                            vcs_dirs.append(dirpath)
+                        vcs_bundle_file = vcs_backend.bundle_file
+                        vcs_guide = vcs_backend.guide % {'url': vcs_url,
+                                                         'rev': vcs_rev}
+                        dirnames.remove(vcs_backend.dirname)
+                        break
+                if 'pip-egg-info' in dirnames:
+                    dirnames.remove('pip-egg-info')
+                for dirname in dirnames:
+                    dirname = os.path.join(dirpath, dirname)
+                    name = self._clean_zip_name(dirname, dir)
+                    zip.writestr(basename + '/' + name + '/', '')
+                for filename in filenames:
+                    if filename == 'pip-delete-this-directory.txt':
+                        continue
+                    filename = os.path.join(dirpath, filename)
+                    name = self._clean_zip_name(filename, dir)
+                    zip.write(filename, basename + '/' + name)
+                if vcs_url:
+                    name = os.path.join(dirpath, vcs_bundle_file)
+                    name = self._clean_zip_name(name, dir)
+                    zip.writestr(basename + '/' + name, vcs_guide)
+
+        zip.writestr('pip-manifest.txt', self.bundle_requirements())
+        zip.close()
+        # Unlike installation, this will always delete the build directories
+        logger.info('Removing temporary build dir %s and source dir %s'
+                    % (self.build_dir, self.src_dir))
+        for dir in self.build_dir, self.src_dir:
+            if os.path.exists(dir):
+                shutil.rmtree(dir)
+
+
+    BUNDLE_HEADER = '''\
+# This is a pip bundle file, that contains many source packages
+# that can be installed as a group.  You can install this like:
+#     pip this_file.zip
+# The rest of the file contains a list of all the packages included:
+'''
+
+    def bundle_requirements(self):
+        parts = [self.BUNDLE_HEADER]
+        for req in sorted(
+            [req for req in self.requirements.values()
+             if not req.comes_from],
+            key=lambda x: x.name):
+            parts.append('%s==%s\n' % (req.name, req.installed_version))
+        parts.append('# These packages were installed to satisfy the above requirements:\n')
+        for req in sorted(
+            [req for req in self.requirements.values()
+             if req.comes_from],
+            key=lambda x: x.name):
+            parts.append('%s==%s\n' % (req.name, req.installed_version))
+        ## FIXME: should we do something with self.unnamed_requirements?
+        return ''.join(parts)
+
+    def _clean_zip_name(self, name, prefix):
+        assert name.startswith(prefix+'/'), (
+            "name %r doesn't start with prefix %r" % (name, prefix))
+        name = name[len(prefix)+1:]
+        name = name.replace(os.path.sep, '/')
+        return name
+
+class HTMLPage(object):
+    """Represents one page, along with its URL"""
+
+    ## FIXME: these regexes are horrible hacks:
+    _homepage_re = re.compile(r'<th>\s*home\s*page', re.I)
+    _download_re = re.compile(r'<th>\s*download\s+url', re.I)
+    ## These aren't so aweful:
+    _rel_re = re.compile("""<[^>]*\srel\s*=\s*['"]?([^'">]+)[^>]*>""", re.I)
+    _href_re = re.compile('href=(?:"([^"]*)"|\'([^\']*)\'|([^>\\s\\n]*))', re.I|re.S)
+    _base_re = re.compile(r"""<base\s+href\s*=\s*['"]?([^'">]+)""", re.I)
+
+    def __init__(self, content, url, headers=None):
+        self.content = content
+        self.url = url
+        self.headers = headers
+
+    def __str__(self):
+        return self.url
+
+    @classmethod
+    def get_page(cls, link, req, cache=None, skip_archives=True):
+        url = link.url
+        url = url.split('#', 1)[0]
+        if cache.too_many_failures(url):
+            return None
+        if url.lower().startswith('svn'):
+            logger.debug('Cannot look at svn URL %s' % link)
+            return None
+        if cache is not None:
+            inst = cache.get_page(url)
+            if inst is not None:
+                return inst
+        try:
+            if skip_archives:
+                if cache is not None:
+                    if cache.is_archive(url):
+                        return None
+                filename = link.filename
+                for bad_ext in ['.tar', '.tar.gz', '.tar.bz2', '.tgz', '.zip']:
+                    if filename.endswith(bad_ext):
+                        content_type = cls._get_content_type(url)
+                        if content_type.lower().startswith('text/html'):
+                            break
+                        else:
+                            logger.debug('Skipping page %s because of Content-Type: %s' % (link, content_type))
+                            if cache is not None:
+                                cache.set_is_archive(url)
+                            return None
+            logger.debug('Getting page %s' % url)
+            resp = urllib2.urlopen(url)
+            real_url = resp.geturl()
+            headers = resp.info()
+            inst = cls(resp.read(), real_url, headers)
+        except (urllib2.HTTPError, urllib2.URLError, socket.timeout, socket.error), e:
+            desc = str(e)
+            if isinstance(e, socket.timeout):
+                log_meth = logger.info
+                level =1
+                desc = 'timed out'
+            elif isinstance(e, urllib2.URLError):
+                log_meth = logger.info
+                if hasattr(e, 'reason') and isinstance(e.reason, socket.timeout):
+                    desc = 'timed out'
+                    level = 1
+                else:
+                    level = 2
+            elif isinstance(e, urllib2.HTTPError) and e.code == 404:
+                ## FIXME: notify?
+                log_meth = logger.info
+                level = 2
+            else:
+                log_meth = logger.info
+                level = 1
+            log_meth('Could not fetch URL %s: %s' % (link, desc))
+            log_meth('Will skip URL %s when looking for download links for %s' % (link.url, req))
+            if cache is not None:
+                cache.add_page_failure(url, level)
+            return None
+        if cache is not None:
+            cache.add_page([url, real_url], inst)
+        return inst
+
+    @staticmethod
+    def _get_content_type(url):
+        """Get the Content-Type of the given url, using a HEAD request"""
+        scheme, netloc, path, query, fragment = urlparse.urlsplit(url)
+        if scheme == 'http':
+            ConnClass = httplib.HTTPConnection
+        elif scheme == 'https':
+            ConnClass = httplib.HTTPSConnection
+        else:
+            ## FIXME: some warning or something?
+            ## assertion error?
+            return ''
+        if query:
+            path += '?' + query
+        conn = ConnClass(netloc)
+        try:
+            conn.request('HEAD', path, headers={'Host': netloc})
+            resp = conn.getresponse()
+            if resp.status != 200:
+                ## FIXME: doesn't handle redirects
+                return ''
+            return resp.getheader('Content-Type') or ''
+        finally:
+            conn.close()
+
+    @property
+    def base_url(self):
+        if not hasattr(self, "_base_url"):
+            match = self._base_re.search(self.content)
+            if match:
+                self._base_url = match.group(1)
+            else:
+                self._base_url = self.url
+        return self._base_url
+
+    @property
+    def links(self):
+        """Yields all links in the page"""
+        for match in self._href_re.finditer(self.content):
+            url = match.group(1) or match.group(2) or match.group(3)
+            url = self.clean_link(urlparse.urljoin(self.base_url, url))
+            yield Link(url, self)
+
+    def rel_links(self):
+        for url in self.explicit_rel_links():
+            yield url
+        for url in self.scraped_rel_links():
+            yield url
+
+    def explicit_rel_links(self, rels=('homepage', 'download')):
+        """Yields all links with the given relations"""
+        for match in self._rel_re.finditer(self.content):
+            found_rels = match.group(1).lower().split()
+            for rel in rels:
+                if rel in found_rels:
+                    break
+            else:
+                continue
+            match = self._href_re.search(match.group(0))
+            if not match:
+                continue
+            url = match.group(1) or match.group(2) or match.group(3)
+            url = self.clean_link(urlparse.urljoin(self.base_url, url))
+            yield Link(url, self)
+
+    def scraped_rel_links(self):
+        for regex in (self._homepage_re, self._download_re):
+            match = regex.search(self.content)
+            if not match:
+                continue
+            href_match = self._href_re.search(self.content, pos=match.end())
+            if not href_match:
+                continue
+            url = match.group(1) or match.group(2) or match.group(3)
+            if not url:
+                continue
+            url = self.clean_link(urlparse.urljoin(self.base_url, url))
+            yield Link(url, self)
+
+    _clean_re = re.compile(r'[^a-z0-9$&+,/:;=?@.#%_\\|-]', re.I)
+
+    def clean_link(self, url):
+        """Makes sure a link is fully encoded.  That is, if a ' ' shows up in
+        the link, it will be rewritten to %20 (while not over-quoting
+        % or other characters)."""
+        return self._clean_re.sub(
+            lambda match: '%%%2x' % ord(match.group(0)), url)
+
class PageCache(object):
    """Cache of HTML pages, fetch failures, and archive flags."""

    # A URL is considered dead once its accumulated failure level
    # reaches this threshold.
    failure_limit = 3

    def __init__(self):
        self._failures = {}   # url -> accumulated failure level
        self._pages = {}      # url -> cached page object
        self._archives = {}   # url -> bool, whether the URL is an archive

    def too_many_failures(self, url):
        """True once *url* has accumulated failure_limit or more failures."""
        return self._failures.get(url, 0) >= self.failure_limit

    def get_page(self, url):
        """Return the cached page for *url*, or None if not cached."""
        return self._pages.get(url)

    def is_archive(self, url):
        """True if *url* has been flagged as pointing at an archive."""
        return self._archives.get(url, False)

    def set_is_archive(self, url, value=True):
        """Record whether *url* points at an archive."""
        self._archives[url] = value

    def add_page_failure(self, url, level):
        """Add *level* to the accumulated failure count for *url*."""
        self._failures[url] = self._failures.get(url, 0) + level

    def add_page(self, urls, page):
        """Cache *page* under every URL in *urls*."""
        self._pages.update(dict.fromkeys(urls, page))
+
class Link(object):
    """A URL, optionally annotated with the page it was found on."""

    def __init__(self, url, comes_from=None):
        self.url = url
        # The page (or other object) this link was scraped from;
        # used only for display.
        self.comes_from = comes_from

    def __str__(self):
        if self.comes_from:
            return '%s (from %s)' % (self.url, self.comes_from)
        else:
            return self.url

    def __repr__(self):
        return '<Link %s>' % self

    def __eq__(self, other):
        return self.url == other.url

    def __ne__(self, other):
        # Bug fix: Python 2 does not derive != from __eq__; without
        # __ne__, 'a != b' compared identity, inconsistent with ==.
        return self.url != other.url

    def __hash__(self):
        return hash(self.url)

    @property
    def filename(self):
        """Filename implied by the URL, with fragment, query string and
        any trailing slash stripped."""
        url = self.url
        url = url.split('#', 1)[0]
        url = url.split('?', 1)[0]
        url = url.rstrip('/')
        name = posixpath.basename(url)
        assert name, (
            'URL %r produced no filename' % url)
        return name

    @property
    def scheme(self):
        """Scheme component of the URL (e.g. 'http', 'svn+ssh')."""
        return urlparse.urlsplit(self.url)[0]

    @property
    def path(self):
        """Path component of the URL."""
        return urlparse.urlsplit(self.url)[2]

    def splitext(self):
        """Return (root, ext) of the basename of the URL path."""
        return splitext(posixpath.basename(self.path.rstrip('/')))

    # Matches the project name in a '#egg=...' fragment.
    _egg_fragment_re = re.compile(r'#egg=([^&]*)')

    @property
    def egg_fragment(self):
        """Project name from a '#egg=...' fragment, or None."""
        match = self._egg_fragment_re.search(self.url)
        if not match:
            return None
        return match.group(1)

    # Matches an 'md5=...' checksum embedded in the URL.
    _md5_re = re.compile(r'md5=([a-f0-9]+)')

    @property
    def md5_hash(self):
        """md5 checksum embedded in the URL, or None."""
        match = self._md5_re.search(self.url)
        if match:
            return match.group(1)
        return None

    @property
    def show_url(self):
        """Short display form of the URL: basename without fragment/query."""
        return posixpath.basename(self.url.split('#', 1)[0].split('?', 1)[0])
+
+############################################################
+## Writing freeze files
+
+
class FrozenRequirement(object):
    """One requirement line for a freeze file, plus leading comments."""

    def __init__(self, name, req, editable, comments=()):
        self.name = name
        self.req = req           # requirement object or '-e'-able URL string
        self.editable = editable
        self.comments = comments

    # Matches a '-rNNN' svn revision suffix in a version string.
    _rev_re = re.compile(r'-r(\d+)$')
    # Matches a '-YYYYMMDD' date suffix in a version string.
    _date_re = re.compile(r'-(20\d\d\d\d\d\d)$')

    @classmethod
    def from_dist(cls, dist, dependency_links, find_tags=False):
        """Build a FrozenRequirement from an installed distribution.

        Editable VCS checkouts become '-e' requirements; versions that
        look svn revision/date pinned are resolved to an svn URL when
        possible; everything else becomes a plain '==' requirement.
        """
        location = os.path.normcase(os.path.abspath(dist.location))
        comments = []
        if vcs.get_backend_name(location):
            editable = True
            req = get_src_requirement(dist, location, find_tags)
            if req is None:
                logger.warn('Could not determine repository location of %s' % location)
                comments.append('## !! Could not determine repository location')
                req = dist.as_requirement()
                editable = False
        else:
            editable = False
            req = dist.as_requirement()
            specs = req.specs
            assert len(specs) == 1 and specs[0][0] == '=='
            version = specs[0][1]
            ver_match = cls._rev_re.search(version)
            date_match = cls._date_re.search(version)
            if ver_match or date_match:
                svn_backend = vcs.get_backend('svn')
                # Bug fix: initialize svn_location so the check below does
                # not raise NameError when no svn backend is registered.
                svn_location = None
                if svn_backend:
                    svn_location = svn_backend(
                        ).get_location(dist, dependency_links)
                if not svn_location:
                    logger.warn(
                        'Warning: cannot find svn location for %s' % req)
                    comments.append('## FIXME: could not find svn URL in dependency_links for this package:')
                else:
                    comments.append('# Installing as editable to satisfy requirement %s:' % req)
                    if ver_match:
                        rev = ver_match.group(1)
                    else:
                        rev = '{%s}' % date_match.group(1)
                    editable = True
                    req = 'svn+%s@%s#egg=%s' % (svn_location, rev, cls.egg_name(dist))
        return cls(dist.project_name, req, editable, comments)

    @staticmethod
    def egg_name(dist):
        """Distribution egg name with any '-pyX.Y' suffix stripped."""
        name = dist.egg_name()
        match = re.search(r'-py\d\.\d$', name)
        if match:
            name = name[:match.start()]
        return name

    def __str__(self):
        """Render the freeze-file lines: comments, then the requirement."""
        req = self.req
        if self.editable:
            req = '-e %s' % req
        return '\n'.join(list(self.comments)+[str(req)])+'\n'
+
class VersionControl(object):
    """Base class for version control backends (svn, git, hg, ...)."""

    name = ''     # command name, e.g. 'svn'
    dirname = ''  # control directory, e.g. '.svn'

    def __init__(self, url=None, *args, **kwargs):
        self.url = url
        self._cmd = None  # lazily resolved path to the VCS executable
        super(VersionControl, self).__init__(*args, **kwargs)

    def _filter(self, line):
        # Default stdout filter: log every subprocess line at INFO level.
        return (Logger.INFO, line)

    @property
    def cmd(self):
        """Full path to the VCS executable, located once and cached."""
        if self._cmd is not None:
            return self._cmd
        command = find_command(self.name)
        if command is None:
            raise BadCommand('Cannot find command %s' % self.name)
        logger.info('Found command %s at %s' % (self.name, command))
        self._cmd = command
        return command

    def get_url_rev(self):
        """
        Returns the correct repository URL and revision by parsing the given
        repository URL (e.g. 'svn+http://host/path@rev' -> (url, rev)).
        """
        url = self.url.split('+', 1)[1]
        scheme, netloc, path, query, frag = urlparse.urlsplit(url)
        rev = None
        if '@' in path:
            path, rev = path.rsplit('@', 1)
        url = urlparse.urlunsplit((scheme, netloc, path, query, ''))
        return url, rev

    def get_info(self, location):
        """
        Returns (url, revision), where both are strings
        """
        assert not location.rstrip('/').endswith(self.dirname), 'Bad directory: %s' % location
        return self.get_url(location), self.get_revision(location)

    def normalize_url(self, url):
        """
        Normalize a URL for comparison by unquoting it and removing any trailing slash.
        """
        return urllib.unquote(url).rstrip('/')

    def compare_urls(self, url1, url2):
        """
        Compare two repo URLs for identity, ignoring incidental differences.
        """
        return (self.normalize_url(url1) == self.normalize_url(url2))

    def parse_vcs_bundle_file(self, content):
        """
        Takes the contents of the bundled text file that explains how to revert
        the stripped off version control data of the given package and returns
        the URL and revision of it.
        """
        raise NotImplementedError

    def obtain(self, dest):
        """
        Called when installing or updating an editable package, takes the
        source path of the checkout.
        """
        raise NotImplementedError

    def switch(self, dest, url, rev_options):
        """
        Switch the repo at ``dest`` to point to ``URL``.
        """
        # Bug fix: raise NotImplementedError; 'raise NotImplemented'
        # raises TypeError because NotImplemented is not an exception.
        raise NotImplementedError

    def update(self, dest, rev_options):
        """
        Update an already-existing repo to the given ``rev_options``.
        """
        raise NotImplementedError

    def check_destination(self, dest, url, rev_options, rev_display):
        """
        Prepare a location to receive a checkout/clone.

        Return True if the location is ready for (and requires) a
        checkout/clone, False otherwise.
        """
        # NOTE: relies on self.repo_name being set by the subclass
        # (e.g. 'checkout' for svn, 'clone' for git/hg).
        checkout = True
        prompt = False
        if os.path.exists(dest):
            checkout = False
            if os.path.exists(os.path.join(dest, self.dirname)):
                existing_url = self.get_url(dest)
                if self.compare_urls(existing_url, url):
                    # Same repo already checked out: just update it.
                    logger.info('%s in %s exists, and has correct URL (%s)'
                                % (self.repo_name.title(), display_path(dest), url))
                    logger.notify('Updating %s %s%s'
                                  % (display_path(dest), self.repo_name, rev_display))
                    self.update(dest, rev_options)
                else:
                    logger.warn('%s %s in %s exists with URL %s'
                                % (self.name, self.repo_name, display_path(dest), existing_url))
                    prompt = ('(s)witch, (i)gnore, (w)ipe, (b)ackup ', ('s', 'i', 'w', 'b'))
            else:
                logger.warn('Directory %s already exists, and is not a %s %s.'
                            % (dest, self.name, self.repo_name))
                prompt = ('(i)gnore, (w)ipe, (b)ackup ', ('i', 'w', 'b'))
        if prompt:
            logger.warn('The plan is to install the %s repository %s'
                        % (self.name, url))
            response = ask('What to do?  %s' % prompt[0], prompt[1])

            if response == 's':
                logger.notify('Switching %s %s to %s%s'
                              % (self.repo_name, display_path(dest), url, rev_display))
                self.switch(dest, url, rev_options)
            elif response == 'i':
                # do nothing
                pass
            elif response == 'w':
                logger.warn('Deleting %s' % display_path(dest))
                shutil.rmtree(dest)
                checkout = True
            elif response == 'b':
                dest_dir = backup_dir(dest)
                logger.warn('Backing up %s to %s'
                            % (display_path(dest), dest_dir))
                shutil.move(dest, dest_dir)
                checkout = True
        return checkout

    def unpack(self, location):
        """Check out / export the repository into ``location``."""
        raise NotImplementedError

    def get_src_requirement(self, dist, location, find_tags=False):
        """Return an editable requirement string for the checkout."""
        raise NotImplementedError
+
+_svn_xml_url_re = re.compile('url="([^"]+)"')
+_svn_rev_re = re.compile('committed-rev="(\d+)"')
+_svn_url_re = re.compile(r'URL: (.+)')
+_svn_revision_re = re.compile(r'Revision: (.+)')
+
class Subversion(VersionControl):
    """Subversion backend."""

    name = 'svn'
    dirname = '.svn'
    repo_name = 'checkout'
    schemes = ('svn', 'svn+ssh', 'svn+http', 'svn+https')
    bundle_file = 'svn-checkout.txt'
    guide = ('# This was an svn checkout; to make it a checkout again run:\n'
            'svn checkout --force -r %(rev)s %(url)s .\n')

    def get_info(self, location):
        """Returns (url, revision), where both are strings"""
        assert not location.rstrip('/').endswith(self.dirname), 'Bad directory: %s' % location
        output = call_subprocess(
            ['svn', 'info', location], show_stdout=False, extra_environ={'LANG': 'C'})
        match = _svn_url_re.search(output)
        if not match:
            logger.warn('Cannot determine URL of svn checkout %s' % display_path(location))
            logger.info('Output that cannot be parsed: \n%s' % output)
            return None, None
        url = match.group(1).strip()
        match = _svn_revision_re.search(output)
        if not match:
            logger.warn('Cannot determine revision of svn checkout %s' % display_path(location))
            logger.info('Output that cannot be parsed: \n%s' % output)
            return url, None
        return url, match.group(1)

    # NOTE: the original also defined get_url()/get_revision() here as
    # thin wrappers over get_info(); both were dead code, shadowed by the
    # entries-file based definitions further down, and have been removed.

    def parse_vcs_bundle_file(self, content):
        """Extract (url, rev) from the svn bundle guide text.

        Looks for '-r <rev> <url>' on the first non-comment line (as
        written by ``guide``).
        """
        for line in content.splitlines():
            if not line.strip() or line.strip().startswith('#'):
                continue
            # Bug fix: the old pattern r'^-r\s*([^ ])?' was anchored to
            # the start of the line (the guide line begins with
            # 'svn checkout', not '-r') and captured at most a single
            # character of the revision.
            match = re.search(r'-r\s*([^ ]+)', line)
            if not match:
                return None, None
            rev = match.group(1)
            rest = line[match.end():].strip().split(None, 1)[0]
            return rest, rev
        return None, None

    def unpack(self, location):
        """Check out the svn repository at the url to the destination location"""
        url, rev = self.get_url_rev()
        logger.notify('Checking out svn repository %s to %s' % (url, location))
        logger.indent += 2
        try:
            if os.path.exists(location):
                # Subversion doesn't like to check out over an existing directory
                # --force fixes this, but was only added in svn 1.5
                shutil.rmtree(location, onerror=rmtree_errorhandler)
            call_subprocess(
                ['svn', 'checkout', url, location],
                filter_stdout=self._filter, show_stdout=False)
        finally:
            logger.indent -= 2

    def export(self, location):
        """Export the svn repository at the url to the destination location"""
        url, rev = self.get_url_rev()
        logger.notify('Checking out svn repository %s to %s' % (url, location))
        logger.indent += 2
        try:
            if os.path.exists(location):
                # Subversion doesn't like to check out over an existing directory
                # --force fixes this, but was only added in svn 1.5
                shutil.rmtree(location, onerror=rmtree_errorhandler)
            call_subprocess(
                ['svn', 'export', url, location],
                filter_stdout=self._filter, show_stdout=False)
        finally:
            logger.indent -= 2

    def switch(self, dest, url, rev_options):
        """Point the checkout at ``dest`` to a different repository URL."""
        call_subprocess(
            ['svn', 'switch'] + rev_options + [url, dest])

    def update(self, dest, rev_options):
        """Update the checkout at ``dest`` to the given revision options."""
        call_subprocess(
            ['svn', 'update'] + rev_options + [dest])

    def obtain(self, dest):
        """Check out self.url (optionally pinned to a revision) into dest."""
        url, rev = self.get_url_rev()
        if rev:
            rev_options = ['-r', rev]
            rev_display = ' (to revision %s)' % rev
        else:
            rev_options = []
            rev_display = ''
        if self.check_destination(dest, url, rev_options, rev_display):
            logger.notify('Checking out %s%s to %s'
                          % (url, rev_display, display_path(dest)))
            call_subprocess(
                ['svn', 'checkout', '-q'] + rev_options + [url, dest])

    def get_location(self, dist, dependency_links):
        """Find the dependency link whose #egg= fragment matches ``dist``."""
        for url in dependency_links:
            egg_fragment = Link(url).egg_fragment
            if not egg_fragment:
                continue
            if '-' in egg_fragment:
                ## FIXME: will this work when a package has - in the name?
                key = '-'.join(egg_fragment.split('-')[:-1]).lower()
            else:
                key = egg_fragment
            if key == dist.key:
                return url.split('#', 1)[0]
        return None

    def get_revision(self, location):
        """
        Return the maximum revision for all files under a given location
        """
        # Note: taken from setuptools.command.egg_info
        revision = 0

        for base, dirs, files in os.walk(location):
            if self.dirname not in dirs:
                dirs[:] = []
                continue    # no sense walking uncontrolled subdirs
            dirs.remove(self.dirname)
            entries_fn = os.path.join(base, self.dirname, 'entries')
            if not os.path.exists(entries_fn):
                ## FIXME: should we warn?
                continue
            f = open(entries_fn)
            data = f.read()
            f.close()

            # svn >= 1.4 uses a plain-text entries format whose first
            # line is the format number (8, 9 or 10).
            if data.startswith('8') or data.startswith('9') or data.startswith('10'):
                data = map(str.splitlines,data.split('\n\x0c\n'))
                del data[0][0]  # get rid of the '8'
                dirurl = data[0][3]
                revs = [int(d[9]) for d in data if len(d)>9 and d[9]]+[0]
                if revs:
                    localrev = max(revs)
                else:
                    localrev = 0
            elif data.startswith('<?xml'):
                # svn < 1.4 stored entries as XML.
                dirurl = _svn_xml_url_re.search(data).group(1)    # get repository URL
                revs = [int(m.group(1)) for m in _svn_rev_re.finditer(data)]+[0]
                if revs:
                    localrev = max(revs)
                else:
                    localrev = 0
            else:
                logger.warn("Unrecognized .svn/entries format; skipping %s", base)
                dirs[:] = []
                continue
            if base == location:
                base_url = dirurl+'/'   # save the root url
            elif not dirurl.startswith(base_url):
                dirs[:] = []
                continue    # not part of the same svn tree, skip it
            revision = max(revision, localrev)
        return revision

    def get_url(self, location):
        """Return the repository URL recorded in .svn/entries, or None."""
        # In cases where the source is in a subdirectory, not alongside setup.py
        # we have to look up in the location until we find a real setup.py
        orig_location = location
        while not os.path.exists(os.path.join(location, 'setup.py')):
            last_location = location
            location = os.path.dirname(location)
            if location == last_location:
                # We've traversed up to the root of the filesystem without finding setup.py
                logger.warn("Could not find setup.py for directory %s (tried all parent directories)"
                            % orig_location)
                return None
        f = open(os.path.join(location, self.dirname, 'entries'))
        data = f.read()
        f.close()
        if data.startswith('8') or data.startswith('9') or data.startswith('10'):
            data = map(str.splitlines,data.split('\n\x0c\n'))
            del data[0][0]  # get rid of the '8'
            return data[0][3]
        elif data.startswith('<?xml'):
            match = _svn_xml_url_re.search(data)
            if not match:
                raise ValueError('Badly formatted data: %r' % data)
            return match.group(1)    # get repository URL
        else:
            logger.warn("Unrecognized .svn/entries format in %s" % location)
            # Or raise exception?
            return None

    def get_tag_revs(self, svn_tag_url):
        """Return [(tag, revision)] pairs parsed from 'svn ls -v'."""
        stdout = call_subprocess(
            ['svn', 'ls', '-v', svn_tag_url], show_stdout=False)
        results = []
        for line in stdout.splitlines():
            parts = line.split()
            rev = int(parts[0])
            tag = parts[-1].strip('/')
            results.append((tag, rev))
        return results

    def find_tag_match(self, rev, tag_revs):
        """Return the tag whose revision most closely follows ``rev``."""
        best_match_rev = None
        best_tag = None
        for tag, tag_rev in tag_revs:
            if (tag_rev > rev and
                (best_match_rev is None or best_match_rev > tag_rev)):
                # FIXME: Is best_match > tag_rev really possible?
                # or is it a sign something is wacky?
                best_match_rev = tag_rev
                best_tag = tag
        return best_tag

    def get_src_requirement(self, dist, location, find_tags=False):
        """Build an 'svn+URL@rev#egg=name' requirement for the checkout."""
        repo = self.get_url(location)
        if repo is None:
            return None
        parts = repo.split('/')
        ## FIXME: why not project name?
        egg_project_name = dist.egg_name().split('-', 1)[0]
        rev = self.get_revision(location)
        if parts[-2] in ('tags', 'tag'):
            # It's a tag, perfect!
            full_egg_name = '%s-%s' % (egg_project_name, parts[-1])
        elif parts[-2] in ('branches', 'branch'):
            # It's a branch :(
            full_egg_name = '%s-%s-r%s' % (dist.egg_name(), parts[-1], rev)
        elif parts[-1] == 'trunk':
            # Trunk :-/
            full_egg_name = '%s-dev_r%s' % (dist.egg_name(), rev)
            if find_tags:
                tag_url = '/'.join(parts[:-1]) + '/tags'
                tag_revs = self.get_tag_revs(tag_url)
                match = self.find_tag_match(rev, tag_revs)
                if match:
                    logger.notify('trunk checkout %s seems to be equivalent to tag %s' % match)
                    repo = '%s/%s' % (tag_url, match)
                    full_egg_name = '%s-%s' % (egg_project_name, match)
        else:
            # Don't know what it is
            logger.warn('svn URL does not fit normal structure (tags/branches/trunk): %s' % repo)
            full_egg_name = '%s-dev_r%s' % (egg_project_name, rev)
        return 'svn+%s@%s#egg=%s' % (repo, rev, full_egg_name)
+
+vcs.register(Subversion)
+
+
class Git(VersionControl):
    """Git backend."""

    name = 'git'
    dirname = '.git'
    repo_name = 'clone'
    schemes = ('git', 'git+http', 'git+ssh', 'git+git')
    bundle_file = 'git-clone.txt'
    guide = ('# This was a Git repo; to make it a repo again run:\n'
        'git init\ngit remote add origin %(url)s -f\ngit checkout %(rev)s\n')

    def parse_vcs_bundle_file(self, content):
        """Extract (url, rev) from the git bundle guide text."""
        url = rev = None
        for line in content.splitlines():
            if not line.strip() or line.strip().startswith('#'):
                continue
            url_match = re.search(r'git\s*remote\s*add\s*origin(.*)\s*-f', line)
            if url_match:
                url = url_match.group(1).strip()
            # Bug fix: '-q' is optional here -- the guide written into the
            # bundle file uses 'git checkout %(rev)s' without '-q', so the
            # old pattern never matched and rev was never found.
            rev_match = re.search(r'^git\s*checkout\s*(?:-q\s*)?(.*)\s*', line)
            if rev_match:
                rev = rev_match.group(1).strip()
            if url and rev:
                return url, rev
        return None, None

    def unpack(self, location):
        """Clone the Git repository at the url to the destination location"""
        url, rev = self.get_url_rev()
        logger.notify('Cloning Git repository %s to %s' % (url, location))
        logger.indent += 2
        try:
            if os.path.exists(location):
                # rmdir only removes an empty directory.
                os.rmdir(location)
            call_subprocess(
                [self.cmd, 'clone', url, location],
                filter_stdout=self._filter, show_stdout=False)
        finally:
            logger.indent -= 2

    def export(self, location):
        """Export the Git repository at the url to the destination location"""
        temp_dir = tempfile.mkdtemp('-export', 'pip-')
        self.unpack(temp_dir)
        try:
            # checkout-index requires the prefix to end with a slash.
            if not location.endswith('/'):
                location = location + '/'
            call_subprocess(
                [self.cmd, 'checkout-index', '-a', '-f', '--prefix', location],
                filter_stdout=self._filter, show_stdout=False, cwd=temp_dir)
        finally:
            shutil.rmtree(temp_dir)

    def check_rev_options(self, rev, dest, rev_options):
        """Check the revision options before checkout to compensate that tags
        and branches may need origin/ as a prefix"""
        if rev is None:
            # bail and use preset
            return rev_options
        revisions = self.get_tag_revs(dest)
        revisions.update(self.get_branch_revs(dest))
        if rev in revisions:
            # if rev is a sha
            return [rev]
        inverse_revisions = dict((v,k) for k, v in revisions.iteritems())
        if rev not in inverse_revisions: # is rev a name or tag?
            origin_rev = 'origin/%s' % rev
            if origin_rev in inverse_revisions:
                rev = inverse_revisions[origin_rev]
            else:
                logger.warn("Could not find a tag or branch '%s', assuming commit." % rev)
        return [rev]

    def switch(self, dest, url, rev_options):
        """Repoint the clone at ``dest`` to ``url`` and check out ``rev_options``."""
        call_subprocess(
            [self.cmd, 'config', 'remote.origin.url', url], cwd=dest)
        call_subprocess(
            [self.cmd, 'checkout', '-q'] + rev_options, cwd=dest)

    def update(self, dest, rev_options):
        """Fetch from origin, then force-checkout the requested revision."""
        call_subprocess([self.cmd, 'fetch', '-q'], cwd=dest)
        call_subprocess(
            [self.cmd, 'checkout', '-q', '-f'] + rev_options, cwd=dest)

    def obtain(self, dest):
        """Clone self.url (optionally pinned to a revision) into dest."""
        url, rev = self.get_url_rev()
        if rev:
            rev_options = [rev]
            rev_display = ' (to %s)' % rev
        else:
            rev_options = ['origin/master']
            rev_display = ''
        if self.check_destination(dest, url, rev_options, rev_display):
            logger.notify('Cloning %s%s to %s' % (url, rev_display, display_path(dest)))
            call_subprocess(
                [self.cmd, 'clone', '-q', url, dest])
            rev_options = self.check_rev_options(rev, dest, rev_options)
            call_subprocess(
                [self.cmd, 'checkout', '-q'] + rev_options, cwd=dest)

    def get_url(self, location):
        """Return the URL of the 'origin' remote for the clone."""
        url = call_subprocess(
            [self.cmd, 'config', 'remote.origin.url'],
            show_stdout=False, cwd=location)
        return url.strip()

    def get_revision(self, location):
        """Return the sha of HEAD."""
        current_rev = call_subprocess(
            [self.cmd, 'rev-parse', 'HEAD'], show_stdout=False, cwd=location)
        return current_rev.strip()

    def get_tag_revs(self, location):
        """Return a dict mapping sha -> tag name."""
        tags = call_subprocess(
            [self.cmd, 'tag'], show_stdout=False, cwd=location)
        tag_revs = []
        for line in tags.splitlines():
            tag = line.strip()
            rev = call_subprocess(
                [self.cmd, 'rev-parse', tag], show_stdout=False, cwd=location)
            tag_revs.append((rev.strip(), tag))
        tag_revs = dict(tag_revs)
        return tag_revs

    def get_branch_revs(self, location):
        """Return a dict mapping sha -> remote branch name."""
        branches = call_subprocess(
            [self.cmd, 'branch', '-r'], show_stdout=False, cwd=location)
        branch_revs = []
        for line in branches.splitlines():
            # Drop symbolic-ref arrows ('HEAD -> origin/master') and the
            # '*' current-branch marker.
            line = line.split('->')[0].strip()
            branch = "".join([b for b in line.split() if b != '*'])
            rev = call_subprocess(
                [self.cmd, 'rev-parse', branch], show_stdout=False, cwd=location)
            branch_revs.append((rev.strip(), branch))
        branch_revs = dict(branch_revs)
        return branch_revs

    def get_src_requirement(self, dist, location, find_tags):
        """Build a 'git+URL@sha#egg=name' requirement for the clone."""
        repo = self.get_url(location)
        # Bug fix: check for an empty repo URL before prefixing it --
        # previously 'git+' was prepended first, so this guard never fired.
        if not repo:
            return None
        if not repo.lower().startswith('git:'):
            repo = 'git+' + repo
        egg_project_name = dist.egg_name().split('-', 1)[0]
        current_rev = self.get_revision(location)
        tag_revs = self.get_tag_revs(location)
        branch_revs = self.get_branch_revs(location)

        if current_rev in tag_revs:
            # It's a tag
            full_egg_name = '%s-%s' % (egg_project_name, tag_revs[current_rev])
        elif (current_rev in branch_revs and
              branch_revs[current_rev] != 'origin/master'):
            # It's the head of a branch
            full_egg_name = '%s-%s' % (dist.egg_name(),
                                       branch_revs[current_rev].replace('origin/', ''))
        else:
            full_egg_name = '%s-dev' % dist.egg_name()

        return '%s@%s#egg=%s' % (repo, current_rev, full_egg_name)

    def get_url_rev(self):
        """
        Prefixes stub URLs like 'user@hostname:user/repo.git' with 'ssh://'.
        That's required because although they use SSH they sometimes doesn't
        work with a ssh:// scheme (e.g. Github). But we need a scheme for
        parsing. Hence we remove it again afterwards and return it as a stub.
        """
        if not '://' in self.url:
            self.url = self.url.replace('git+', 'git+ssh://')
            url, rev = super(Git, self).get_url_rev()
            url = url.replace('ssh://', '')
            return url, rev
        return super(Git, self).get_url_rev()
+
+vcs.register(Git)
+
+class Mercurial(VersionControl):
+    name = 'hg'
+    dirname = '.hg'
+    repo_name = 'clone'
+    schemes = ('hg', 'hg+http', 'hg+https', 'hg+ssh', 'hg+static-http')
+    bundle_file = 'hg-clone.txt'
+    guide = ('# This was a Mercurial repo; to make it a repo again run:\n'
+            'hg init\nhg pull %(url)s\nhg update -r %(rev)s\n')
+
+    def parse_vcs_bundle_file(self, content):
+        url = rev = None
+        for line in content.splitlines():
+            if not line.strip() or line.strip().startswith('#'):
+                continue
+            url_match = re.search(r'hg\s*pull\s*(.*)\s*', line)
+            if url_match:
+                url = url_match.group(1).strip()
+            rev_match = re.search(r'^hg\s*update\s*-r\s*(.*)\s*', line)
+            if rev_match:
+                rev = rev_match.group(1).strip()
+            if url and rev:
+                return url, rev
+        return None, None
+
    def unpack(self, location):
        """Clone the Hg repository at the url to the destination location"""
        url, rev = self.get_url_rev()
        logger.notify('Cloning Mercurial repository %s to %s' % (url, location))
        logger.indent += 2
        try:
            if os.path.exists(location):
                # rmdir only removes an empty directory; a non-empty
                # target is left for 'hg clone' to report.
                os.rmdir(location)
            call_subprocess(
                ['hg', 'clone', url, location],
                filter_stdout=self._filter, show_stdout=False)
        finally:
            # Always restore the log indentation level.
            logger.indent -= 2
+
    def export(self, location):
        """Export the Hg repository at the url to the destination location"""
        # Clone into a scratch directory first, then 'hg archive' a
        # pristine copy (without .hg) into the requested location.
        temp_dir = tempfile.mkdtemp('-export', 'pip-')
        self.unpack(temp_dir)
        try:
            call_subprocess(
                ['hg', 'archive', location],
                filter_stdout=self._filter, show_stdout=False, cwd=temp_dir)
        finally:
            shutil.rmtree(temp_dir)
+
    def switch(self, dest, url, rev_options):
        """Repoint the clone at ``dest`` to ``url`` by rewriting the
        'paths.default' entry in its hgrc, then update to ``rev_options``."""
        repo_config = os.path.join(dest, self.dirname, 'hgrc')
        config = ConfigParser.SafeConfigParser()
        try:
            config.read(repo_config)
            config.set('paths', 'default', url)
            config_file = open(repo_config, 'w')
            config.write(config_file)
            config_file.close()
        except (OSError, ConfigParser.NoSectionError), e:
            # Best effort: leave the repo untouched if the config can't
            # be rewritten.
            logger.warn(
                'Could not switch Mercurial repository to %s: %s'
                % (url, e))
        else:
            # Only update the working copy once the URL switch succeeded.
            call_subprocess(['hg', 'update', '-q'] + rev_options, cwd=dest)
+
+    def update(self, dest, rev_options):
+        call_subprocess(['hg', 'pull', '-q'], cwd=dest)
+        call_subprocess(
+            ['hg', 'update', '-q'] + rev_options, cwd=dest)
+
+    def obtain(self, dest):
+        url, rev = self.get_url_rev()
+        if rev:
+            rev_options = [rev]
+            rev_display = ' (to revision %s)' % rev
+        else:
+            rev_options = []
+            rev_display = ''
+        if self.check_destination(dest, url, rev_options, rev_display):
+            logger.notify('Cloning hg %s%s to %s'
+                          % (url, rev_display, display_path(dest)))
+            call_subprocess(['hg', 'clone', '-q', url, dest])
+            call_subprocess(['hg', 'update', '-q'] + rev_options, cwd=dest)
+
+    def get_url(self, location):
+        url = call_subprocess(
+            ['hg', 'showconfig', 'paths.default'],
+            show_stdout=False, cwd=location).strip()
+        if url.startswith('/') or url.startswith('\\'):
+            url = filename_to_url(url)
+        return url.strip()
+
+    def get_tag_revs(self, location):
+        tags = call_subprocess(
+            ['hg', 'tags'], show_stdout=False, cwd=location)
+        tag_revs = []
+        for line in tags.splitlines():
+            tags_match = re.search(r'([\w\d\.-]+)\s*([\d]+):.*$', line)
+            if tags_match:
+                tag = tags_match.group(1)
+                rev = tags_match.group(2)
+                tag_revs.append((rev.strip(), tag.strip()))
+        return dict(tag_revs)
+
+    def get_branch_revs(self, location):
+        branches = call_subprocess(
+            ['hg', 'branches'], show_stdout=False, cwd=location)
+        branch_revs = []
+        for line in branches.splitlines():
+            branches_match = re.search(r'([\w\d\.-]+)\s*([\d]+):.*$', line)
+            if branches_match:
+                branch = branches_match.group(1)
+                rev = branches_match.group(2)
+                branch_revs.append((rev.strip(), branch.strip()))
+        return dict(branch_revs)
+
+    def get_revision(self, location):
+        current_revision = call_subprocess(
+            ['hg', 'parents', '--template={rev}'],
+            show_stdout=False, cwd=location).strip()
+        return current_revision
+
+    def get_revision_hash(self, location):
+        current_rev_hash = call_subprocess(
+            ['hg', 'parents', '--template={node}'],
+            show_stdout=False, cwd=location).strip()
+        return current_rev_hash
+
+    def get_src_requirement(self, dist, location, find_tags):
+        repo = self.get_url(location)
+        if not repo.lower().startswith('hg:'):
+            repo = 'hg+' + repo
+        egg_project_name = dist.egg_name().split('-', 1)[0]
+        if not repo:
+            return None
+        current_rev = self.get_revision(location)
+        current_rev_hash = self.get_revision_hash(location)
+        tag_revs = self.get_tag_revs(location)
+        branch_revs = self.get_branch_revs(location)
+        if current_rev in tag_revs:
+            # It's a tag
+            full_egg_name = '%s-%s' % (egg_project_name, tag_revs[current_rev])
+        elif current_rev in branch_revs:
+            # It's the tip of a branch
+            full_egg_name = '%s-%s' % (dist.egg_name(), branch_revs[current_rev])
+        else:
+            full_egg_name = '%s-dev' % dist.egg_name()
+        return '%s@%s#egg=%s' % (repo, current_rev_hash, full_egg_name)
+
+vcs.register(Mercurial)
+
+
class Bazaar(VersionControl):
    """Support for Bazaar (bzr) branches as editable sources."""

    name = 'bzr'
    dirname = '.bzr'
    repo_name = 'branch'
    bundle_file = 'bzr-branch.txt'
    schemes = ('bzr', 'bzr+http', 'bzr+https', 'bzr+ssh', 'bzr+sftp', 'bzr+ftp')
    guide = ('# This was a Bazaar branch; to make it a branch again run:\n'
             'bzr branch -r %(rev)s %(url)s .\n')

    def parse_vcs_bundle_file(self, content):
        """Extract (url, rev) from a bzr-branch.txt bundle file.

        Returns (None, None) when no usable line is found.
        """
        url = rev = None
        for line in content.splitlines():
            if not line.strip() or line.strip().startswith('#'):
                continue
            match = re.search(r'^bzr\s*branch\s*-r\s*(\d*)', line)
            if match:
                # BUG FIX: the URL was previously read from match.end() even
                # when no match was found, raising AttributeError on any
                # non-matching, non-comment line; and an empty remainder
                # raised IndexError.  Both are now guarded.
                rev = match.group(1).strip()
                remainder = line[match.end():].strip().split(None, 1)
                if remainder:
                    url = remainder[0]
            if url and rev:
                return url, rev
        return None, None

    def unpack(self, location):
        """Get the bzr branch at the url to the destination location."""
        url, rev = self.get_url_rev()
        logger.notify('Checking out bzr repository %s to %s' % (url, location))
        logger.indent += 2
        try:
            if os.path.exists(location):
                # bzr refuses to branch into an existing directory.
                os.rmdir(location)
            call_subprocess(
                [self.cmd, 'branch', url, location],
                filter_stdout=self._filter, show_stdout=False)
        finally:
            logger.indent -= 2

    def export(self, location):
        """Export the Bazaar repository at the url to the destination location."""
        temp_dir = tempfile.mkdtemp('-export', 'pip-')
        self.unpack(temp_dir)
        if os.path.exists(location):
            # Remove the location to make sure Bazaar can export it correctly
            shutil.rmtree(location, onerror=rmtree_errorhandler)
        try:
            call_subprocess([self.cmd, 'export', location], cwd=temp_dir,
                            filter_stdout=self._filter, show_stdout=False)
        finally:
            shutil.rmtree(temp_dir)

    def switch(self, dest, url, rev_options):
        """Re-point the branch at ``dest`` to a new parent ``url``."""
        call_subprocess([self.cmd, 'switch', url], cwd=dest)

    def update(self, dest, rev_options):
        """Pull new revisions into the branch at ``dest``."""
        call_subprocess(
            [self.cmd, 'pull', '-q'] + rev_options, cwd=dest)

    def obtain(self, dest):
        """Branch the repository into ``dest``, honoring any #rev fragment."""
        url, rev = self.get_url_rev()
        if rev:
            rev_options = ['-r', rev]
            rev_display = ' (to revision %s)' % rev
        else:
            rev_options = []
            rev_display = ''
        if self.check_destination(dest, url, rev_options, rev_display):
            logger.notify('Checking out %s%s to %s'
                          % (url, rev_display, display_path(dest)))
            call_subprocess(
                [self.cmd, 'branch', '-q'] + rev_options + [url, dest])

    def get_url_rev(self):
        # hotfix the URL scheme after removing bzr+ from bzr+ssh:// re-add it
        url, rev = super(Bazaar, self).get_url_rev()
        if url.startswith('ssh://'):
            url = 'bzr+' + url
        return url, rev

    def get_url(self, location):
        """Return the parent/checkout URL from `bzr info`, or None."""
        urls = call_subprocess(
            [self.cmd, 'info'], show_stdout=False, cwd=location)
        for line in urls.splitlines():
            line = line.strip()
            for x in ('checkout of branch: ',
                      'parent branch: '):
                if line.startswith(x):
                    return line.split(x)[1]
        return None

    def get_revision(self, location):
        """Return the branch's current revision number (`bzr revno`)."""
        revision = call_subprocess(
            [self.cmd, 'revno'], show_stdout=False, cwd=location)
        return revision.splitlines()[-1]

    def get_tag_revs(self, location):
        """Map revision -> tag name, parsed from `bzr tags` output."""
        tags = call_subprocess(
            [self.cmd, 'tags'], show_stdout=False, cwd=location)
        tag_revs = []
        for line in tags.splitlines():
            tags_match = re.search(r'([.\w-]+)\s*(.*)$', line)
            if tags_match:
                tag = tags_match.group(1)
                rev = tags_match.group(2)
                tag_revs.append((rev.strip(), tag.strip()))
        return dict(tag_revs)

    def get_src_requirement(self, dist, location, find_tags):
        """Build an editable requirement string: bzr+URL@rev#egg=name."""
        repo = self.get_url(location)
        if not repo:
            # BUG FIX: get_url() may return None; previously this was only
            # checked after calling repo.lower(), raising AttributeError.
            return None
        if not repo.lower().startswith('bzr:'):
            repo = 'bzr+' + repo
        egg_project_name = dist.egg_name().split('-', 1)[0]
        current_rev = self.get_revision(location)
        tag_revs = self.get_tag_revs(location)
        if current_rev in tag_revs:
            # It's a tag
            full_egg_name = '%s-%s' % (egg_project_name, tag_revs[current_rev])
        else:
            full_egg_name = '%s-dev_r%s' % (dist.egg_name(), current_rev)
        return '%s@%s#egg=%s' % (repo, current_rev, full_egg_name)
+
+vcs.register(Bazaar)
+
def get_src_requirement(dist, location, find_tags):
    """Delegate to whichever VCS backend owns ``location``.

    Falls back to the dist's own requirement string (with a warning) when
    the directory is not a recognized checkout.
    """
    backend = vcs.get_backend_from_location(location)
    if backend is not None:
        return backend().get_src_requirement(dist, location, find_tags)
    logger.warn('cannot determine version of editable source in %s (is not SVN checkout, Git clone, Mercurial clone or Bazaar branch)' % location)
    return dist.as_requirement()
+
+############################################################
+## Requirement files
+
# Recognizers for URL-style requirement sources and old-style drive specs.
_scheme_re = re.compile(r'^(http|https|file):', re.I)
_url_slash_drive_re = re.compile(r'/*([a-z])\|', re.I)

def get_file_content(url, comes_from=None):
    """Gets the content of a file; it may be a filename, file: URL, or
    http: URL.  Returns (location, content)"""
    scheme_match = _scheme_re.search(url)
    if scheme_match:
        scheme = scheme_match.group(1).lower()
        if scheme == 'file' and comes_from and comes_from.startswith('http'):
            raise InstallationError(
                'Requirements file %s references URL %s, which is local'
                % (comes_from, url))
        if scheme == 'file':
            path = url.split(':', 1)[1].replace('\\', '/')
            drive_match = _url_slash_drive_re.match(path)
            if drive_match:
                # Old-style Windows drive spec: /c|... -> c:...
                path = drive_match.group(1) + ':' + path.split('|', 1)[1]
            path = urllib.unquote(path)
            if path.startswith('/'):
                # Collapse any run of leading slashes to a single one.
                path = '/' + path.lstrip('/')
            url = path
        else:
            ## FIXME: catch some errors
            resp = urllib2.urlopen(url)
            return resp.geturl(), resp.read()
    fp = open(url)
    try:
        content = fp.read()
    finally:
        fp.close()
    return url, content
+
def parse_requirements(filename, finder=None, comes_from=None, options=None):
    """Yield InstallRequirement objects parsed from a requirements file.

    ``filename`` may be a local path or a URL.  Nested ``-r``/``--requirement``
    files are parsed recursively; ``-f``/``-i``/``--extra-index-url`` lines
    mutate ``finder`` in place.
    """
    skip_match = None
    # BUG FIX: ``options`` defaults to None, so guard the attribute access
    # instead of crashing with AttributeError.
    skip_regex = options.skip_requirements_regex if options else None
    if skip_regex:
        skip_match = re.compile(skip_regex)
    filename, content = get_file_content(filename, comes_from=comes_from)
    for line_number, line in enumerate(content.splitlines()):
        line_number += 1
        line = line.strip()
        if not line or line.startswith('#'):
            continue
        if skip_match and skip_match.search(line):
            continue
        if line.startswith('-r') or line.startswith('--requirement'):
            if line.startswith('-r'):
                req_url = line[2:].strip()
            else:
                req_url = line[len('--requirement'):].strip().strip('=')
            if _scheme_re.search(filename):
                # Relative to a URL
                # BUG FIX: previously joined against the undefined name
                # ``url`` (NameError); the nested file path is ``req_url``.
                req_url = urlparse.urljoin(filename, req_url)
            elif not _scheme_re.search(req_url):
                req_url = os.path.join(os.path.dirname(filename), req_url)
            for item in parse_requirements(req_url, finder, comes_from=filename, options=options):
                yield item
        elif line.startswith('-Z') or line.startswith('--always-unzip'):
            # No longer used, but previously these were used in
            # requirement files, so we'll ignore.
            pass
        elif finder and (line.startswith('-f') or line.startswith('--find-links')):
            # BUG FIX: parenthesized the ``or`` — previously a bare
            # ``--find-links`` line entered this branch even when ``finder``
            # was None, raising AttributeError.
            if line.startswith('-f'):
                line = line[2:].strip()
            else:
                line = line[len('--find-links'):].strip().lstrip('=')
            ## FIXME: it would be nice to keep track of the source of
            ## the find_links:
            finder.find_links.append(line)
        elif line.startswith('-i') or line.startswith('--index-url'):
            if line.startswith('-i'):
                line = line[2:].strip()
            else:
                line = line[len('--index-url'):].strip().lstrip('=')
            finder.index_urls = [line]
        elif line.startswith('--extra-index-url'):
            line = line[len('--extra-index-url'):].strip().lstrip('=')
            finder.index_urls.append(line)
        else:
            comes_from = '-r %s (line %s)' % (filename, line_number)
            if line.startswith('-e') or line.startswith('--editable'):
                if line.startswith('-e'):
                    line = line[2:].strip()
                else:
                    line = line[len('--editable'):].strip()
                req = InstallRequirement.from_editable(
                    line, comes_from=comes_from,
                    default_vcs=options.default_vcs if options else None)
            else:
                req = InstallRequirement.from_line(line, comes_from)
            yield req
+
+############################################################
+## Logging
+
+
+
class Logger(object):

    """
    Logging object for use in command-line script.  Allows ranges of
    levels, to avoid some redundancy of displayed information.
    """

    VERBOSE_DEBUG = logging.DEBUG-1
    DEBUG = logging.DEBUG
    INFO = logging.INFO
    # A level between INFO and WARN, used for normal user-facing output.
    NOTIFY = (logging.INFO+logging.WARN)/2
    WARN = WARNING = logging.WARN
    ERROR = logging.ERROR
    FATAL = logging.FATAL

    LEVELS = [VERBOSE_DEBUG, DEBUG, INFO, NOTIFY, WARN, ERROR, FATAL]

    def __init__(self, consumers):
        # consumers: list of (level, consumer) pairs; a consumer is either
        # a writable stream or a callable taking the rendered line.
        self.consumers = consumers
        self.indent = 0
        self.explicit_levels = False
        self.in_progress = None
        self.in_progress_hanging = False

    def debug(self, msg, *args, **kw):
        self.log(self.DEBUG, msg, *args, **kw)
    def info(self, msg, *args, **kw):
        self.log(self.INFO, msg, *args, **kw)
    def notify(self, msg, *args, **kw):
        self.log(self.NOTIFY, msg, *args, **kw)
    def warn(self, msg, *args, **kw):
        self.log(self.WARN, msg, *args, **kw)
    def error(self, msg, *args, **kw):
        # BUG FIX: previously logged at WARN level instead of ERROR.
        self.log(self.ERROR, msg, *args, **kw)
    def fatal(self, msg, *args, **kw):
        self.log(self.FATAL, msg, *args, **kw)

    def log(self, level, msg, *args, **kw):
        """Render ``msg`` (with %-style args) and send it to every consumer
        whose level matches ``level``."""
        if args and kw:
            raise TypeError(
                "You may give positional or keyword arguments, not both")
        args = args or kw
        rendered = None
        for consumer_level, consumer in self.consumers:
            if self.level_matches(level, consumer_level):
                if (self.in_progress_hanging
                    and consumer in (sys.stdout, sys.stderr)):
                    # Finish the hanging progress line before writing.
                    self.in_progress_hanging = False
                    sys.stdout.write('\n')
                    sys.stdout.flush()
                if rendered is None:
                    if args:
                        rendered = msg % args
                    else:
                        rendered = msg
                    rendered = ' '*self.indent + rendered
                    if self.explicit_levels:
                        ## FIXME: should this be a name, not a level number?
                        rendered = '%02i %s' % (level, rendered)
                if hasattr(consumer, 'write'):
                    consumer.write(rendered+'\n')
                else:
                    consumer(rendered)

    def start_progress(self, msg):
        """Begin a progress scope; subsequent dots/messages attach to it."""
        assert not self.in_progress, (
            "Tried to start_progress(%r) while in_progress %r"
            % (msg, self.in_progress))
        if self.level_matches(self.NOTIFY, self._stdout_level()):
            sys.stdout.write(' '*self.indent + msg)
            sys.stdout.flush()
            self.in_progress_hanging = True
        else:
            self.in_progress_hanging = False
        self.in_progress = msg
        self.last_message = None

    def end_progress(self, msg='done.'):
        """Close the current progress scope, writing ``msg``."""
        assert self.in_progress, (
            "Tried to end_progress without start_progress")
        if self.stdout_level_matches(self.NOTIFY):
            if not self.in_progress_hanging:
                # Some message has been printed out since start_progress
                sys.stdout.write('...' + self.in_progress + msg + '\n')
                sys.stdout.flush()
            else:
                # These erase any messages shown with show_progress (besides .'s)
                logger.show_progress('')
                logger.show_progress('')
                sys.stdout.write(msg + '\n')
                sys.stdout.flush()
        self.in_progress = None
        self.in_progress_hanging = False

    def show_progress(self, message=None):
        """If we are in a progress scope, and no log messages have been
        shown, write out another '.'"""
        if self.in_progress_hanging:
            if message is None:
                sys.stdout.write('.')
                sys.stdout.flush()
            else:
                if self.last_message:
                    # Pad with spaces to fully overwrite the previous message.
                    padding = ' ' * max(0, len(self.last_message)-len(message))
                else:
                    padding = ''
                sys.stdout.write('\r%s%s%s%s' % (' '*self.indent, self.in_progress, message, padding))
                sys.stdout.flush()
                self.last_message = message

    def stdout_level_matches(self, level):
        """Returns true if a message at this level will go to stdout"""
        return self.level_matches(level, self._stdout_level())

    def _stdout_level(self):
        """Returns the level that stdout runs at"""
        for level, consumer in self.consumers:
            if consumer is sys.stdout:
                return level
        return self.FATAL

    def level_matches(self, level, consumer_level):
        """
        >>> l = Logger([])
        >>> l.level_matches(3, 4)
        False
        >>> l.level_matches(3, 2)
        True
        >>> l.level_matches(slice(None, 3), 3)
        False
        >>> l.level_matches(slice(None, 3), 2)
        True
        >>> l.level_matches(slice(1, 3), 1)
        True
        >>> l.level_matches(slice(2, 3), 1)
        False
        """
        if isinstance(level, slice):
            start, stop = level.start, level.stop
            if start is not None and start > consumer_level:
                return False
            # BUG FIX: this condition used ``or``, which made every bounded
            # slice reject its consumer; the doctests require ``and``.
            if stop is not None and stop <= consumer_level:
                return False
            return True
        else:
            return level >= consumer_level

    @classmethod
    def level_for_integer(cls, level):
        """Clamp an integer verbosity index into the LEVELS table."""
        levels = cls.LEVELS
        if level < 0:
            return levels[0]
        if level >= len(levels):
            return levels[-1]
        return levels[level]

    def move_stdout_to_stderr(self):
        """Re-target every stdout consumer at stderr (e.g. for `pip freeze`)."""
        to_remove = []
        to_add = []
        for consumer_level, consumer in self.consumers:
            if consumer == sys.stdout:
                to_remove.append((consumer_level, consumer))
                to_add.append((consumer_level, sys.stderr))
        for item in to_remove:
            self.consumers.remove(item)
        self.consumers.extend(to_add)
+
+
def call_subprocess(cmd, show_stdout=True,
                    filter_stdout=None, cwd=None,
                    raise_on_returncode=True,
                    command_level=Logger.DEBUG, command_desc=None,
                    extra_environ=None):
    """Run ``cmd`` (a list of arguments) and log its output.

    If ``show_stdout`` is true the child writes straight to our stdout;
    otherwise its combined stdout/stderr is captured line by line,
    optionally passed through ``filter_stdout`` (which returns a log level,
    or a (level, rewritten-line) tuple), and returned as one string.
    Raises InstallationError on a nonzero exit code unless
    ``raise_on_returncode`` is false.
    """
    if command_desc is None:
        # Build a shell-like, quoted rendering of the command for logging.
        cmd_parts = []
        for part in cmd:
            if ' ' in part or '\n' in part or '"' in part or "'" in part:
                part = '"%s"' % part.replace('"', '\\"')
            cmd_parts.append(part)
        command_desc = ' '.join(cmd_parts)
    if show_stdout:
        stdout = None
    else:
        stdout = subprocess.PIPE
    logger.log(command_level, "Running command %s" % command_desc)
    env = os.environ.copy()
    if extra_environ:
        env.update(extra_environ)
    try:
        proc = subprocess.Popen(
            cmd, stderr=subprocess.STDOUT, stdin=None, stdout=stdout,
            cwd=cwd, env=env)
    except Exception, e:
        logger.fatal(
            "Error %s while executing command %s" % (e, command_desc))
        raise
    all_output = []
    if stdout is not None:
        # Capturing: stream the child's output so long commands show progress.
        stdout = proc.stdout
        while 1:
            line = stdout.readline()
            if not line:
                break
            line = line.rstrip()
            all_output.append(line + '\n')
            if filter_stdout:
                level = filter_stdout(line)
                if isinstance(level, tuple):
                    # The filter may rewrite the line as well as the level.
                    level, line = level
                logger.log(level, line)
                if not logger.stdout_level_matches(level):
                    logger.show_progress()
            else:
                logger.info(line)
    else:
        # Not capturing: just wait for the child to finish.
        returned_stdout, returned_stderr = proc.communicate()
        all_output = [returned_stdout or '']
    proc.wait()
    if proc.returncode:
        if raise_on_returncode:
            if all_output:
                # Dump everything we captured so the failure is diagnosable.
                logger.notify('Complete output from command %s:' % command_desc)
                logger.notify('\n'.join(all_output) + '\n----------------------------------------')
            raise InstallationError(
                "Command %s failed with error code %s"
                % (command_desc, proc.returncode))
        else:
            logger.warn(
                "Command %s had error code %s"
                % (command_desc, proc.returncode))
    if stdout is not None:
        return ''.join(all_output)
+
+############################################################
+## Utility functions
+
def is_svn_page(html):
    """Returns true if the page appears to be the index page of an svn repository"""
    has_revision_title = re.search(r'<title>[^<]*Revision \d+:', html)
    powered_by_svn = re.search(r'Powered by (?:<a[^>]*?>)?Subversion', html, re.I)
    return has_revision_title and powered_by_svn
+
def file_contents(filename):
    """Read and return the raw (binary) contents of ``filename``."""
    with open(filename, 'rb') as fp:
        return fp.read()
+
def split_leading_dir(path):
    """Split off the first path component; returns (leading, rest).

    Handles both '/' and '\\' separators, splitting on whichever
    appears first.  With no separator, returns (path, '').
    """
    path = str(path).lstrip('/').lstrip('\\')
    has_slash = '/' in path
    has_backslash = '\\' in path
    if has_slash and (not has_backslash or path.find('/') < path.find('\\')):
        return path.split('/', 1)
    if has_backslash:
        return path.split('\\', 1)
    return path, ''
+
def has_leading_dir(paths):
    """Returns true if all the paths have the same leading path name
    (i.e., everything is in one subdirectory in an archive)"""
    first_prefix = None
    for path in paths:
        prefix, _rest = split_leading_dir(path)
        if not prefix:
            return False
        if first_prefix is None:
            first_prefix = prefix
        elif prefix != first_prefix:
            return False
    return True
+
def format_size(bytes):
    """Render a byte count as a human-readable Mb/Kb/bytes string."""
    if bytes > 1000*1000:
        return '%.1fMb' % (bytes/1000.0/1000)
    if bytes > 10*1000:
        return '%iKb' % (bytes/1000)
    if bytes > 1000:
        return '%.1fKb' % (bytes/1000.0)
    return '%ibytes' % bytes
+
# Any character that is not a letter gets replaced during normalization.
_normalize_re = re.compile(r'[^a-z]', re.I)

def normalize_name(name):
    """Lower-case ``name`` and replace every non-letter with '-'."""
    return _normalize_re.sub('-', name.lower())
+
def make_path_relative(path, rel_to):
    """
    Return ``path`` rewritten relative to the directory ``rel_to``.

        >>> make_path_relative('/usr/share/something/a-file.pth',
        ...                    '/usr/share/another-place/src/Directory')
        '../../../something/a-file.pth'
        >>> make_path_relative('/usr/share/something/a-file.pth',
        ...                    '/home/user/src/Directory')
        '../../../usr/share/something/a-file.pth'
        >>> make_path_relative('/usr/share/a-file.pth', '/usr/share/')
        'a-file.pth'
    """
    basename = os.path.basename(path)
    dir_parts = os.path.normpath(
        os.path.abspath(os.path.dirname(path))
        ).strip(os.path.sep).split(os.path.sep)
    rel_parts = os.path.normpath(
        os.path.abspath(rel_to)
        ).strip(os.path.sep).split(os.path.sep)
    # Drop the common leading components.
    while dir_parts and rel_parts and dir_parts[0] == rel_parts[0]:
        dir_parts.pop(0)
        rel_parts.pop(0)
    full_parts = ['..']*len(rel_parts) + dir_parts + [basename]
    if full_parts == ['']:
        return '.' + os.path.sep
    return os.path.sep.join(full_parts)
+
def display_path(path):
    """Gives the display value for a given path, making it relative to cwd
    if possible."""
    absolute = os.path.normcase(os.path.abspath(path))
    if absolute.startswith(os.getcwd() + os.path.sep):
        absolute = '.' + absolute[len(os.getcwd()):]
    return absolute
+
def parse_editable(editable_req, default_vcs=None):
    """Parses svn+http://blahblah@rev#egg=Foobar into a requirement
    (Foobar) and a URL"""
    url = editable_req
    if os.path.isdir(url) and os.path.exists(os.path.join(url, 'setup.py')):
        # Treating it as code that has already been checked out
        url = filename_to_url(url)
    if url.lower().startswith('file:'):
        # Local directories carry no package name; requirement is None.
        return None, url
    for version_control in vcs:
        if url.lower().startswith('%s:' % version_control):
            # Bare scheme like 'svn:...' -> 'svn+svn:...'
            url = '%s+%s' % (version_control, url)
    if '+' not in url:
        if default_vcs:
            url = default_vcs + '+' + url
        else:
            raise InstallationError(
                '--editable=%s should be formatted with svn+URL, git+URL, hg+URL or bzr+URL' % editable_req)
    vc_type = url.split('+', 1)[0].lower()
    if not vcs.get_backend(vc_type):
        raise InstallationError(
            'For --editable=%s only svn (svn+URL), Git (git+URL), Mercurial (hg+URL) and Bazaar (bzr+URL) is currently supported' % editable_req)
    match = re.search(r'(?:#|#.*?&)egg=([^&]*)', editable_req)
    if (not match or not match.group(1)) and vcs.get_backend(vc_type):
        # No usable #egg= fragment: guess the project name from SVN-style
        # path conventions (trunk/tags/branches); anything else is an error.
        parts = [p for p in editable_req.split('#', 1)[0].split('/') if p]
        if parts[-2] in ('tags', 'branches', 'tag', 'branch'):
            req = parts[-3]
        elif parts[-1] == 'trunk':
            req = parts[-2]
        else:
            raise InstallationError(
                '--editable=%s is not the right format; it must have #egg=Package'
                % editable_req)
    else:
        req = match.group(1)
    ## FIXME: use package_to_requirement?
    match = re.search(r'^(.*?)(?:-dev|-\d.*)', req)
    if match:
        # Strip off -dev, -0.2, etc.
        req = match.group(1)
    return req, url
+
def backup_dir(dir, ext='.bak'):
    """Figure out the name of a directory to back up the given dir to
    (adding .bak, .bak2, etc)"""
    attempt = 1
    candidate = dir + ext
    while os.path.exists(candidate):
        attempt += 1
        candidate = dir + ext + str(attempt)
    return candidate
+
def ask(message, options):
    """Ask the message interactively, with the given possible responses"""
    while 1:
        if os.environ.get('PIP_NO_INPUT'):
            # Fail fast in non-interactive runs (e.g. CI) instead of hanging.
            raise Exception('No input was expected ($PIP_NO_INPUT set); question: %s' % message)
        response = raw_input(message)
        response = response.strip().lower()
        if response not in options:
            # Re-prompt until one of the expected responses is given.
            print 'Your response (%r) was not one of the expected responses: %s' % (
                response, ', '.join(options))
        else:
            return response
+
def open_logfile_append(filename):
    """Open the named log file in append mode.

    If the file already exists, a separator will also be printed to
    the file to separate past activity from current activity.
    """
    exists = os.path.exists(filename)
    log_fp = open(filename, 'a')
    if exists:
        # Visually separate this run's output from the previous run's.
        print >> log_fp, '-'*60
        print >> log_fp, '%s run on %s' % (sys.argv[0], time.strftime('%c'))
    return log_fp
+
def is_url(name):
    """Returns true if the name looks like a URL"""
    if ':' not in name:
        return False
    scheme = name.split(':', 1)[0].lower()
    known_schemes = ['http', 'https', 'file', 'ftp'] + vcs.all_schemes
    return scheme in known_schemes
+
def is_filename(name):
    """Heuristic: does ``name`` look like a local file/path rather than a
    bare requirement specifier like 'Foo'?"""
    archive_exts = ('.zip', '.tar.gz', '.tar.bz2', '.tgz', '.tar', '.pybundle')
    # NOTE: relies on the module's own splitext(), which keeps '.tar.gz'
    # together (unlike os.path.splitext).
    if splitext(name)[1].lower() in archive_exts and os.path.exists(name):
        return True
    if os.path.sep not in name and '/' not in name:
        # Doesn't have any path components, probably a requirement like 'Foo'
        return False
    return True
+
# Windows drive-letter forms: 'c:...' in paths, 'c:' or 'c|' in file URLs.
_drive_re = re.compile('^([a-z]):', re.I)
_url_drive_re = re.compile('^([a-z])[:|]', re.I)

def filename_to_url(filename):
    """
    Convert a path to a file: URL.  The path will be made absolute.
    """
    filename = os.path.normcase(os.path.abspath(filename))
    if _drive_re.match(filename):
        # Encode a Windows drive letter in the old 'c|' file-URL form.
        filename = filename[0] + '|' + filename[2:]
    url = urllib.quote(filename).replace(os.path.sep, '/').lstrip('/')
    return 'file:///' + url
+
def filename_to_url2(filename):
    """
    Convert a path to a file: URL.  The path will be made absolute and have
    quoted path parts.
    """
    filename = os.path.normcase(os.path.abspath(filename))
    drive, tail = os.path.splitdrive(filename)
    quoted = '/'.join(urllib.quote(part) for part in tail.split(os.path.sep))
    if not drive:
        quoted = quoted.lstrip('/')
    return 'file:///' + drive + quoted
+
def url_to_filename(url):
    """
    Convert a file: URL to a path.
    """
    assert url.startswith('file:'), (
        "You can only turn file: urls into filenames (not %r)" % url)
    remainder = urllib.unquote(url[len('file:'):].lstrip('/'))
    if _url_drive_re.match(remainder):
        # 'c|rest' or 'c:rest' -> 'c:rest' (Windows drive spec).
        return remainder[0] + ':' + remainder[2:]
    return '/' + remainder
+
def get_requirement_from_url(url):
    """Get a requirement from the URL, if possible.  This looks for #egg
    in the URL"""
    link = Link(url)
    # Fall back to the filename (minus archive extension) when no #egg=.
    egg_info = link.egg_fragment or splitext(link.filename)[0]
    return package_to_requirement(egg_info)
+
def package_to_requirement(package_name):
    """Translate a name like Foo-1.2 to Foo==1.2"""
    # BUG FIX: the separating dash must sit outside the version group; the
    # old pattern r'^(.*?)(-dev|-\d.*)' captured it, yielding 'Foo==-1.2'.
    match = re.search(r'^(.*?)-(dev|\d.*)', package_name)
    if match:
        name = match.group(1)
        version = match.group(2)
    else:
        name = package_name
        version = ''
    if version:
        return '%s==%s' % (name, version)
    else:
        return name
+
def is_framework_layout(path):
    """Return True if the current platform is the default Python of Mac OS X
    which installs scripts in /usr/local/bin"""
    if sys.platform[:6] != 'darwin':
        return False
    return path[:9] == '/Library/' or path[:16] == '/System/Library/'
+
def strip_prefix(path, prefix):
    """ If ``path`` begins with ``prefix``, return ``path`` with
    ``prefix`` stripped off.  Otherwise return None."""
    candidates = [prefix]
    # Yep, we are special casing the framework layout of MacPython here
    if is_framework_layout(sys.prefix):
        for location in ('/Library', '/usr/local'):
            if path.startswith(location):
                candidates.append(location)
    for candidate in candidates:
        if path.startswith(candidate):
            return candidate, path.replace(candidate + os.path.sep, '')
    return None, None
+
class UninstallPathSet(object):
    """A set of file paths to be removed in the uninstallation of a
    requirement."""
    def __init__(self, dist, restrict_to_prefix):
        # (prefix, relative-path) pairs scheduled for removal.
        self.paths = set()
        # Paths outside ``restrict_to_prefix`` that we refuse to touch.
        self._refuse = set()
        # Maps stripped .pth file path -> UninstallPthEntries for that file.
        self.pth = {}
        self.prefix = os.path.normcase(os.path.realpath(restrict_to_prefix))
        self.dist = dist
        self.location = dist.location
        self.save_dir = None
        self._moved_paths = []
+
+    def _can_uninstall(self):
+        prefix, stripped = strip_prefix(self.location, self.prefix)
+        if not stripped:
+            logger.notify("Not uninstalling %s at %s, outside environment %s"
+                          % (self.dist.project_name, self.dist.location,
+                             self.prefix))
+            return False
+        return True
+
+    def add(self, path):
+        path = os.path.abspath(path)
+        if not os.path.exists(path):
+            return
+        prefix, stripped = strip_prefix(os.path.normcase(path), self.prefix)
+        if stripped:
+            self.paths.add((prefix, stripped))
+        else:
+            self._refuse.add((prefix, path))
+
+    def add_pth(self, pth_file, entry):
+        prefix, stripped = strip_prefix(os.path.normcase(pth_file), self.prefix)
+        if stripped:
+            entry = os.path.normcase(entry)
+            if stripped not in self.pth:
+                self.pth[stripped] = UninstallPthEntries(os.path.join(prefix, stripped))
+            self.pth[stripped].add(os.path.normcase(entry))
+        else:
+            self._refuse.add((prefix, pth_file))
+
+    def compact(self, paths):
+        """Compact a path set to contain the minimal number of paths
+        necessary to contain all paths in the set. If /a/path/ and
+        /a/path/to/a/file.txt are both in the set, leave only the
+        shorter path."""
+        short_paths = set()
+        def sort_set(x, y):
+            prefix_x, path_x = x
+            prefix_y, path_y = y
+            return cmp(len(path_x), len(path_y))
+        for prefix, path in sorted(paths, sort_set):
+            if not any([(path.startswith(shortpath) and
+                         path[len(shortpath.rstrip(os.path.sep))] == os.path.sep)
+                        for shortprefix, shortpath in short_paths]):
+                short_paths.add((prefix, path))
+        return short_paths
+
+    def remove(self, auto_confirm=False):
+        """Remove paths in ``self.paths`` with confirmation (unless
+        ``auto_confirm`` is True)."""
+        if not self._can_uninstall():
+            return
+        logger.notify('Uninstalling %s:' % self.dist.project_name)
+        logger.indent += 2
+        paths = sorted(self.compact(self.paths))
+        try:
+            if auto_confirm:
+                response = 'y'
+            else:
+                for prefix, path in paths:
+                    logger.notify(os.path.join(prefix, path))
+                response = ask('Proceed (y/n)? ', ('y', 'n'))
+            if self._refuse:
+                logger.notify('Not removing or modifying (outside of prefix):')
+                for prefix, path in self.compact(self._refuse):
+                    logger.notify(os.path.join(prefix, path))
+            if response == 'y':
+                self.save_dir = tempfile.mkdtemp('-uninstall', 'pip-')
+                for prefix, path in paths:
+                    full_path = os.path.join(prefix, path)
+                    new_path = os.path.join(self.save_dir, path)
+                    new_dir = os.path.dirname(new_path)
+                    logger.info('Removing file or directory %s' % full_path)
+                    self._moved_paths.append((prefix, path))
+                    os.renames(full_path, new_path)
+                for pth in self.pth.values():
+                    pth.remove()
+                logger.notify('Successfully uninstalled %s' % self.dist.project_name)
+
+        finally:
+            logger.indent -= 2
+
+    def rollback(self):
+        """Rollback the changes previously made by remove()."""
+        if self.save_dir is None:
+            logger.error("Can't roll back %s; was not uninstalled" % self.dist.project_name)
+            return False
+        logger.notify('Rolling back uninstall of %s' % self.dist.project_name)
+        for prefix, path in self._moved_paths:
+            tmp_path = os.path.join(self.save_dir, path)
+            real_path = os.path.join(prefix, path)
+            logger.info('Replacing %s' % real_path)
+            os.renames(tmp_path, real_path)
+        for pth in self.pth:
+            pth.rollback()
+
+    def commit(self):
+        """Remove temporary save dir: rollback will no longer be possible."""
+        if self.save_dir is not None:
+            shutil.rmtree(self.save_dir)
+            self.save_dir = None
+            self._moved_paths = []
+
+
class UninstallPthEntries(object):
    """Tracks entries to delete from a single .pth file, with rollback
    support via a saved copy of the original lines."""
    def __init__(self, pth_file):
        if not os.path.isfile(pth_file):
            raise UninstallationError("Cannot remove entries from nonexistent file %s" % pth_file)
        self.file = pth_file
        self.entries = set()
        # Original file contents, saved by remove() so rollback() can restore.
        self._saved_lines = None

    def add(self, entry):
        """Register a .pth line for removal."""
        self.entries.add(entry)

    def remove(self):
        """Strip all registered entries from the .pth file on disk."""
        logger.info('Removing pth entries from %s:' % self.file)
        fh = open(self.file, 'r')
        lines = fh.readlines()
        self._saved_lines = lines
        fh.close()
        for entry in self.entries:
            logger.info('Removing entry: %s' % entry)
            # BUG FIX: the removal must run once per entry, *inside* the
            # loop.  Previously the try block sat after the loop, so only
            # the last value of ``entry`` was ever removed (and the
            # surrounding try/finally: pass was dead code).
            try:
                lines.remove(entry + '\n')
            except ValueError:
                # Entry already absent from the file; nothing to do.
                pass
        fh = open(self.file, 'w')
        fh.writelines(lines)
        fh.close()

    def rollback(self):
        """Restore the .pth file to the contents saved by remove()."""
        if self._saved_lines is None:
            logger.error('Cannot roll back changes to %s, none were made' % self.file)
            return False
        logger.info('Rolling %s back to previous state' % self.file)
        fh = open(self.file, 'w')
        fh.writelines(self._saved_lines)
        fh.close()
        return True
+
class FakeFile(object):
    """Wrap a list of lines in an object with readline() to make
    ConfigParser happy."""
    def __init__(self, lines):
        self._gen = iter(lines)

    def readline(self):
        # Use the next() builtin (available since Python 2.6) instead of
        # the Python-2-only .next() method, keeping the class usable on
        # Python 3 as well.  A real file returns '' at end-of-input.
        try:
            return next(self._gen)
        except StopIteration:
            return ''
+    
def splitext(path):
    """Like os.path.splitext, but take off .tar too"""
    root, extension = posixpath.splitext(path)
    # Treat '.tar.gz'-style names as a single two-part extension.
    if root.lower().endswith('.tar'):
        return root[:-4], root[-4:] + extension
    return root, extension
+
def find_command(cmd, paths=None, pathext=None):
    """Search ``paths`` (default: the PATH environment variable) for
    ``cmd`` and return the full path of the first match, or None.

    ``pathext`` emulates Windows executable extensions (PATHEXT); when
    ``cmd`` already ends with one of those extensions, no extension is
    appended.
    """
    if paths is None:
        # BUG FIX: the default must be a string -- the old default of []
        # would crash on .split() whenever PATH was unset.
        paths = os.environ.get('PATH', '').split(os.pathsep)
    if isinstance(paths, basestring):
        paths = [paths]
    # check if there are funny path extensions for executables, e.g. Windows
    if pathext is None:
        pathext = os.environ.get('PATHEXT', '.COM;.EXE;.BAT;.CMD')
    pathext = [ext for ext in pathext.lower().split(os.pathsep)]
    # don't use extensions if the command ends with one of them
    if os.path.splitext(cmd)[1].lower() in pathext:
        pathext = ['']
    # check if we find the command on PATH
    for path in paths:
        cmd_path = os.path.join(path, cmd)
        # try each known extension first, then the bare command name
        for ext in pathext:
            cmd_path_ext = cmd_path + ext
            if os.path.exists(cmd_path_ext):
                return cmd_path_ext
        if os.path.exists(cmd_path):
            return cmd_path
    return None
+
class _Inf(object):
    """I am bigger than everything!"""
    # Python 2 three-way comparison: 0 ("equal") only for the singleton
    # itself, 1 ("greater") for every other object.
    def __cmp__(self, a):
        if self is a:
            return 0
        return 1
    def __repr__(self):
        return 'Inf'
# Module-level singleton; the class is deleted so no second instance
# (which would compare unequal to Inf) can be created.
Inf = _Inf()
del _Inf
+
# Command-line entry point; ``main`` is defined earlier in this module
# and returns a non-zero value to signal an error exit status.
if __name__ == '__main__':
    exit = main()
    if exit:
        sys.exit(exit)
Binary file SEESenv/lib/python2.6/site-packages/pip-0.6.1-py2.6.egg/pip.pyc has changed
Binary file SEESenv/lib/python2.6/site-packages/setuptools-0.6c11-py2.6.egg has changed
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/SEESenv/lib/python2.6/site-packages/setuptools.pth	Sat Feb 13 12:29:22 2010 +0530
@@ -0,0 +1,1 @@
+./setuptools-0.6c11-py2.6.egg
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/SEESenv/lib/python2.6/site.py	Sat Feb 13 12:29:22 2010 +0530
@@ -0,0 +1,694 @@
+"""Append module search paths for third-party packages to sys.path.
+
+****************************************************************
+* This module is automatically imported during initialization. *
+****************************************************************
+
+In earlier versions of Python (up to 1.5a3), scripts or modules that
+needed to use site-specific modules would place ``import site''
+somewhere near the top of their code.  Because of the automatic
+import, this is no longer necessary (but code that does it still
+works).
+
+This will append site-specific paths to the module search path.  On
+Unix, it starts with sys.prefix and sys.exec_prefix (if different) and
+appends lib/python<version>/site-packages as well as lib/site-python.
+It also supports the Debian convention of
+lib/python<version>/dist-packages.  On other platforms (mainly Mac and
+Windows), it uses just sys.prefix (and sys.exec_prefix, if different,
+but this is unlikely).  The resulting directories, if they exist, are
+appended to sys.path, and also inspected for path configuration files.
+
+FOR DEBIAN, this sys.path is augmented with directories in /usr/local.
+Local addons go into /usr/local/lib/python<version>/site-packages
+(resp. /usr/local/lib/site-python), Debian addons install into
+/usr/{lib,share}/python<version>/dist-packages.
+
+A path configuration file is a file whose name has the form
+<package>.pth; its contents are additional directories (one per line)
+to be added to sys.path.  Non-existing directories (or
+non-directories) are never added to sys.path; no directory is added to
+sys.path more than once.  Blank lines and lines beginning with
+'#' are skipped. Lines starting with 'import' are executed.
+
+For example, suppose sys.prefix and sys.exec_prefix are set to
+/usr/local and there is a directory /usr/local/lib/python2.X/site-packages
+with three subdirectories, foo, bar and spam, and two path
+configuration files, foo.pth and bar.pth.  Assume foo.pth contains the
+following:
+
+  # foo package configuration
+  foo
+  bar
+  bletch
+
+and bar.pth contains:
+
+  # bar package configuration
+  bar
+
+Then the following directories are added to sys.path, in this order:
+
+  /usr/local/lib/python2.X/site-packages/bar
+  /usr/local/lib/python2.X/site-packages/foo
+
+Note that bletch is omitted because it doesn't exist; bar precedes foo
+because bar.pth comes alphabetically before foo.pth; and spam is
+omitted because it is not mentioned in either path configuration file.
+
+After these path manipulations, an attempt is made to import a module
+named sitecustomize, which can perform arbitrary additional
+site-specific customizations.  If this import fails with an
+ImportError exception, it is silently ignored.
+
+"""
+
+import sys
+import os
+import __builtin__
+try:
+    set
+except NameError:
+    from sets import Set as set
+
# Prefixes for site-packages; add additional prefixes like /usr/local here
PREFIXES = [sys.prefix, sys.exec_prefix]
# Enable per user site-packages directory
# set it to False to disable the feature or True to force the feature
ENABLE_USER_SITE = None
# for distutils.commands.install
# USER_BASE/USER_SITE are filled in by addusersitepackages() below.
USER_SITE = None
USER_BASE = None

# True when running on Jython, which needs special classpath handling.
_is_jython = sys.platform[:4] == 'java'
+
def makepath(*paths):
    """Join *paths* and return the pair (absolute_dir, normcase(absolute_dir)).
    Jython's magic classpath entries are passed through untouched."""
    joined = os.path.join(*paths)
    if _is_jython and (joined == '__classpath__' or
                       joined.startswith('__pyclasspath__')):
        return joined, joined
    absolute = os.path.abspath(joined)
    return absolute, os.path.normcase(absolute)
+
def abs__file__():
    """Set all modules' __file__ attributes to an absolute path"""
    for m in sys.modules.values():
        f = getattr(m, '__file__', None)
        if f is None:
            # Built-in / C extension modules have no __file__; skip them.
            continue
        m.__file__ = os.path.abspath(f)
+
def removeduppaths():
    """ Remove duplicate entries from sys.path along with making them
    absolute"""
    # This ensures that the initial path provided by the interpreter contains
    # only absolute pathnames, even if we're running from the build directory.
    L = []
    known_paths = set()
    for dir in sys.path:
        # Filter out duplicate paths (on case-insensitive file systems also
        # if they only differ in case); turn relative paths into absolute
        # paths.
        dir, dircase = makepath(dir)
        if not dircase in known_paths:
            L.append(dir)
            known_paths.add(dircase)
    sys.path[:] = L
    # Return the normcased entries so callers can keep deduplicating
    # against the same set (see main()).
    return known_paths
+
+# XXX This should not be part of site.py, since it is needed even when
+# using the -S option for Python.  See http://www.python.org/sf/586680
def addbuilddir():
    """Append ./build/lib.<platform> in case we're running in the build dir
    (especially for Guido :-)"""
    from distutils.util import get_platform
    s = "build/lib.%s-%.3s" % (get_platform(), sys.version)
    if hasattr(sys, 'gettotalrefcount'):
        # gettotalrefcount exists only in --with-pydebug builds.
        s += '-pydebug'
    # Assumes sys.path[-1] is the source tree's Modules directory; the
    # caller (main) only invokes this after checking that basename.
    s = os.path.join(os.path.dirname(sys.path[-1]), s)
    sys.path.append(s)
+
def _init_pathinfo():
    """Return a set containing all existing directory entries from sys.path"""
    entries = set()
    for entry in sys.path:
        try:
            if os.path.isdir(entry):
                entries.add(makepath(entry)[1])
        except TypeError:
            # Non-string sys.path entries (e.g. import hook objects).
            continue
    return entries
+
def addpackage(sitedir, name, known_paths):
    """Process the .pth file ``name`` inside ``sitedir``: lines starting
    with '#' are comments, lines starting with 'import' are executed, and
    every other line is a path appended to sys.path if it exists and is
    not already known."""
    if known_paths is None:
        # NOTE(review): the result of _init_pathinfo() is discarded here,
        # leaving known_paths as None, which would break the membership
        # test below; in practice this function is always called from
        # addsitedir() with a real set.
        _init_pathinfo()
        reset = 1
    else:
        reset = 0
    fullname = os.path.join(sitedir, name)
    try:
        f = open(fullname, "rU")
    except IOError:
        return
    try:
        for line in f:
            if line.startswith("#"):
                continue
            if line.startswith("import"):
                exec line
                continue
            line = line.rstrip()
            dir, dircase = makepath(sitedir, line)
            if not dircase in known_paths and os.path.exists(dir):
                sys.path.append(dir)
                known_paths.add(dircase)
    finally:
        f.close()
    if reset:
        known_paths = None
    return known_paths
+
def addsitedir(sitedir, known_paths=None):
    """Add 'sitedir' argument to sys.path if missing and handle .pth files in
    'sitedir'"""
    if known_paths is None:
        known_paths = _init_pathinfo()
        reset = 1
    else:
        reset = 0
    sitedir, sitedircase = makepath(sitedir)
    if not sitedircase in known_paths:
        sys.path.append(sitedir)        # Add path component
    try:
        names = os.listdir(sitedir)
    except os.error:
        # Directory missing or unreadable; nothing to scan.
        return
    # Sort so .pth files are processed in a deterministic order.
    names.sort()
    for name in names:
        if name.endswith(os.extsep + "pth"):
            addpackage(sitedir, name, known_paths)
    if reset:
        known_paths = None
    return known_paths
+
def addsitepackages(known_paths, sys_prefix=sys.prefix, exec_prefix=sys.exec_prefix):
    """Add site-packages (and possibly site-python) to sys.path"""
    # Debian convention: a "local" tree is searched alongside each prefix.
    prefixes = [os.path.join(sys_prefix, "local"), sys_prefix]
    if exec_prefix != sys_prefix:
        prefixes.append(os.path.join(exec_prefix, "local"))

    for prefix in prefixes:
        if prefix:
            if sys.platform in ('os2emx', 'riscos') or _is_jython:
                sitedirs = [os.path.join(prefix, "Lib", "site-packages")]
            elif sys.platform == 'darwin' and prefix == sys_prefix:

                if prefix.startswith("/System/Library/Frameworks/"): # Apple's Python

                    sitedirs = [os.path.join("/Library/Python", sys.version[:3], "site-packages"),
                                os.path.join(prefix, "Extras", "lib", "python")]

                else: # any other Python distros on OSX work this way
                    sitedirs = [os.path.join(prefix, "lib",
                                             "python" + sys.version[:3], "site-packages")]

            elif os.sep == '/':
                # Generic POSIX layout; Debian dist-packages added below.
                sitedirs = [os.path.join(prefix,
                                         "lib",
                                         "python" + sys.version[:3],
                                         "site-packages"),
                            os.path.join(prefix, "lib", "site-python"),
                            os.path.join(prefix, "python" + sys.version[:3], "lib-dynload")]
                lib64_dir = os.path.join(prefix, "lib64", "python" + sys.version[:3], "site-packages")
                # Only add lib64 when it is a real, distinct directory
                # (not a symlink back to one of the dirs above).
                if (os.path.exists(lib64_dir) and 
                    os.path.realpath(lib64_dir) not in [os.path.realpath(p) for p in sitedirs]):
                    sitedirs.append(lib64_dir)
                try:
                    # sys.getobjects only available in --with-pydebug build
                    sys.getobjects
                    sitedirs.insert(0, os.path.join(sitedirs[0], 'debug'))
                except AttributeError:
                    pass
                # Debian-specific dist-packages directories:
                sitedirs.append(os.path.join(prefix, "lib",
                                             "python" + sys.version[:3],
                                             "dist-packages"))
                sitedirs.append(os.path.join(prefix, "local/lib",
                                             "python" + sys.version[:3],
                                             "dist-packages"))
                sitedirs.append(os.path.join(prefix, "lib", "dist-python"))
            else:
                # Windows and other platforms: the prefix itself plus
                # lib/site-packages.
                sitedirs = [prefix, os.path.join(prefix, "lib", "site-packages")]
            if sys.platform == 'darwin':
                # for framework builds *only* we add the standard Apple
                # locations. Currently only per-user, but /Library and
                # /Network/Library could be added too
                if 'Python.framework' in prefix:
                    home = os.environ.get('HOME')
                    if home:
                        sitedirs.append(
                            os.path.join(home,
                                         'Library',
                                         'Python',
                                         sys.version[:3],
                                         'site-packages'))
            for sitedir in sitedirs:
                if os.path.isdir(sitedir):
                    addsitedir(sitedir, known_paths)
    return None
+
def check_enableusersite():
    """Check if user site directory is safe for inclusion

    The function tests for the command line flag (including environment var),
    process uid/gid equal to effective uid/gid.

    None: Disabled for security reasons
    False: Disabled by user (command line option)
    True: Safe and enabled
    """
    # sys.flags only exists on Python 2.6+; getattr guards older builds.
    if hasattr(sys, 'flags') and getattr(sys.flags, 'no_user_site', False):
        return False

    if hasattr(os, "getuid") and hasattr(os, "geteuid"):
        # check process uid == effective uid
        if os.geteuid() != os.getuid():
            return None
    if hasattr(os, "getgid") and hasattr(os, "getegid"):
        # check process gid == effective gid
        if os.getegid() != os.getgid():
            return None

    return True
+
def addusersitepackages(known_paths):
    """Add a per user site-package to sys.path

    Each user has its own python directory with site-packages in the
    home directory.

    USER_BASE is the root directory for all Python versions

    USER_SITE is the user specific site-packages directory

    USER_SITE/.. can be used for data.
    """
    global USER_BASE, USER_SITE, ENABLE_USER_SITE
    env_base = os.environ.get("PYTHONUSERBASE", None)

    def joinuser(*args):
        # Join then expand ~ so a literal "~" base resolves to $HOME.
        return os.path.expanduser(os.path.join(*args))

    #if sys.platform in ('os2emx', 'riscos'):
    #    # Don't know what to put here
    #    USER_BASE = ''
    #    USER_SITE = ''
    if os.name == "nt":
        # Windows: base under %APPDATA%, falling back to the home dir.
        base = os.environ.get("APPDATA") or "~"
        if env_base:
            USER_BASE = env_base
        else:
            USER_BASE = joinuser(base, "Python")
        USER_SITE = os.path.join(USER_BASE,
                                 "Python" + sys.version[0] + sys.version[2],
                                 "site-packages")
    else:
        if env_base:
            USER_BASE = env_base
        else:
            USER_BASE = joinuser("~", ".local")
        USER_SITE = os.path.join(USER_BASE, "lib",
                                 "python" + sys.version[:3],
                                 "site-packages")

    if ENABLE_USER_SITE and os.path.isdir(USER_SITE):
        addsitedir(USER_SITE, known_paths)
    if ENABLE_USER_SITE:
        # Debian per-user dist-packages variants.
        for dist_libdir in ("lib", "local/lib"):
            user_site = os.path.join(USER_BASE, dist_libdir,
                                     "python" + sys.version[:3],
                                     "dist-packages")
            if os.path.isdir(user_site):
                addsitedir(user_site, known_paths)
    return known_paths
+
+
+
def setBEGINLIBPATH():
    """The OS/2 EMX port has optional extension modules that do double duty
    as DLLs (and must use the .DLL file extension) for other extensions.
    The library search path needs to be amended so these will be found
    during module import.  Use BEGINLIBPATH so that these are at the start
    of the library search path.

    """
    # Only called when sys.platform == 'os2emx' (see main()).
    dllpath = os.path.join(sys.prefix, "Lib", "lib-dynload")
    libpath = os.environ['BEGINLIBPATH'].split(';')
    if libpath[-1]:
        libpath.append(dllpath)
    else:
        # Trailing ';' left an empty final element; reuse that slot.
        libpath[-1] = dllpath
    os.environ['BEGINLIBPATH'] = ';'.join(libpath)
+
+
def setquit():
    """Define new built-ins 'quit' and 'exit'.
    These are simply strings that display a hint on how to exit.

    """
    # Pick the platform's EOF key for the hint text (':' sep is classic
    # Mac OS, '\\' is Windows, otherwise assume a Unix-like terminal).
    if os.sep == ':':
        eof = 'Cmd-Q'
    elif os.sep == '\\':
        eof = 'Ctrl-Z plus Return'
    else:
        eof = 'Ctrl-D (i.e. EOF)'

    class Quitter(object):
        def __init__(self, name):
            self.name = name
        def __repr__(self):
            return 'Use %s() or %s to exit' % (self.name, eof)
        def __call__(self, code=None):
            # Shells like IDLE catch the SystemExit, but listen when their
            # stdin wrapper is closed.
            try:
                sys.stdin.close()
            except:
                pass
            raise SystemExit(code)
    __builtin__.quit = Quitter('quit')
    __builtin__.exit = Quitter('exit')
+
+
class _Printer(object):
    """interactive prompt objects for printing the license text, a list of
    contributors and the copyright notice."""

    # Page size used by the interactive pager in __call__.
    MAXLINES = 23

    def __init__(self, name, data, files=(), dirs=()):
        self.__name = name
        self.__data = data      # fallback text when no file is found
        self.__files = files    # candidate file names to look for
        self.__dirs = dirs      # directories searched for those files
        self.__lines = None     # filled lazily by __setup()

    def __setup(self):
        # Load the text once: first readable file wins, else __data.
        if self.__lines:
            return
        data = None
        for dir in self.__dirs:
            for filename in self.__files:
                filename = os.path.join(dir, filename)
                try:
                    fp = file(filename, "rU")
                    data = fp.read()
                    fp.close()
                    break
                except IOError:
                    pass
            if data:
                break
        if not data:
            data = self.__data
        self.__lines = data.split('\n')
        self.__linecnt = len(self.__lines)

    def __repr__(self):
        # Short texts print in full; long ones print a usage hint.
        self.__setup()
        if len(self.__lines) <= self.MAXLINES:
            return "\n".join(self.__lines)
        else:
            return "Type %s() to see the full %s text" % ((self.__name,)*2)

    def __call__(self):
        # Page through the text MAXLINES at a time; 'q' quits early,
        # Return shows the next page.
        self.__setup()
        prompt = 'Hit Return for more, or q (and Return) to quit: '
        lineno = 0
        while 1:
            try:
                for i in range(lineno, lineno + self.MAXLINES):
                    print self.__lines[i]
            except IndexError:
                break
            else:
                lineno += self.MAXLINES
                key = None
                while key is None:
                    key = raw_input(prompt)
                    if key not in ('', 'q'):
                        key = None
                if key == 'q':
                    break
+
def setcopyright():
    """Set 'copyright' and 'credits' in __builtin__"""
    __builtin__.copyright = _Printer("copyright", sys.copyright)
    if _is_jython:
        __builtin__.credits = _Printer(
            "credits",
            "Jython is maintained by the Jython developers (www.jython.org).")
    else:
        __builtin__.credits = _Printer("credits", """\
    Thanks to CWI, CNRI, BeOpen.com, Zope Corporation and a cast of thousands
    for supporting Python development.  See www.python.org for more information.""")
    # 'license' prefers the on-disk LICENSE file next to the stdlib,
    # falling back to a pointer at python.org.
    here = os.path.dirname(os.__file__)
    __builtin__.license = _Printer(
        "license", "See http://www.python.org/%.3s/license.html" % sys.version,
        ["LICENSE.txt", "LICENSE"],
        [os.path.join(here, os.pardir), here, os.curdir])
+
+
+class _Helper(object):
+    """Define the built-in 'help'.
+    This is a wrapper around pydoc.help (with a twist).
+
+    """
+
+    def __repr__(self):
+        return "Type help() for interactive help, " \
+               "or help(object) for help about object."
+    def __call__(self, *args, **kwds):
+        import pydoc
+        return pydoc.help(*args, **kwds)
+
def sethelper():
    # Install the interactive help() built-in (a _Helper instance).
    __builtin__.help = _Helper()
+
def aliasmbcs():
    """On Windows, some default encodings are not provided by Python,
    while they are always available as "mbcs" in each locale. Make
    them usable by aliasing to "mbcs" in such a case."""
    if sys.platform == 'win32':
        import locale, codecs
        enc = locale.getdefaultlocale()[1]
        # BUG FIX: getdefaultlocale() can return (None, None); guard
        # before calling startswith on the encoding name.
        if enc is not None and enc.startswith('cp'):    # "cp***" ?
            try:
                codecs.lookup(enc)
            except LookupError:
                # Codepage unknown to Python: alias it to the OS codec.
                import encodings
                encodings._cache[enc] = encodings._unknown
                encodings.aliases.aliases[enc] = 'mbcs'
+
def setencoding():
    """Set the string encoding used by the Unicode implementation.  The
    default is 'ascii', but if you're willing to experiment, you can
    change this."""
    encoding = "ascii" # Default value set by _PyUnicode_Init()
    # The two ``if 0:`` blocks below are deliberate compile-time
    # switches: flip them to 1 to opt in to the described behaviour.
    if 0:
        # Enable to support locale aware default string encodings.
        import locale
        loc = locale.getdefaultlocale()
        if loc[1]:
            encoding = loc[1]
    if 0:
        # Enable to switch off string to Unicode coercion and implicit
        # Unicode to string conversion.
        encoding = "undefined"
    if encoding != "ascii":
        # On Non-Unicode builds this will raise an AttributeError...
        sys.setdefaultencoding(encoding) # Needs Python Unicode build !
+
+
def execsitecustomize():
    """Run custom site specific code, if available."""
    try:
        import sitecustomize
    except ImportError:
        # No sitecustomize module installed; that's fine.
        pass
+
def virtual_install_main_packages():
    """Read the real (pre-virtualenv) prefix from orig-prefix.txt, store
    it as sys.real_prefix, and extend sys.path with the real
    interpreter's standard-library directories."""
    f = open(os.path.join(os.path.dirname(__file__), 'orig-prefix.txt'))
    sys.real_prefix = f.read().strip()
    f.close()
    # NOTE(review): ``pos`` is computed but never used below; it looks
    # like a leftover insertion index from upstream virtualenv -- verify
    # before relying on it.
    pos = 2
    if sys.path[0] == '':
        pos += 1
    if sys.platform == 'win32':
        paths = [os.path.join(sys.real_prefix, 'Lib'), os.path.join(sys.real_prefix, 'DLLs')]
    elif _is_jython:
        paths = [os.path.join(sys.real_prefix, 'Lib')]
    else:
        paths = [os.path.join(sys.real_prefix, 'lib', 'python'+sys.version[:3])]
        lib64_path = os.path.join(sys.real_prefix, 'lib64', 'python'+sys.version[:3])
        if os.path.exists(lib64_path):
            paths.append(lib64_path)
        # This is hardcoded in the Python executable, but relative to sys.prefix:
        plat_path = os.path.join(sys.real_prefix, 'lib', 'python'+sys.version[:3],
                                 'plat-%s' % sys.platform)
        if os.path.exists(plat_path):
            paths.append(plat_path)
    # This is hardcoded in the Python executable, but
    # relative to sys.prefix, so we have to fix up:
    for path in list(paths):
        tk_dir = os.path.join(path, 'lib-tk')
        if os.path.exists(tk_dir):
            paths.append(tk_dir)

    # These are hardcoded in the Apple's Python executable,
    # but relative to sys.prefix, so we have to fix them up:
    if sys.platform == 'darwin':
        hardcoded_paths = [os.path.join(sys.real_prefix, 'lib', 'python'+sys.version[:3], module)
                           for module in ('plat-darwin', 'plat-mac', 'plat-mac/lib-scriptpackages')]

        for path in hardcoded_paths:
            if os.path.exists(path):
                paths.append(path)

    sys.path.extend(paths)
+
def force_global_eggs_after_local_site_packages():
    """
    Force easy_installed eggs in the global environment to get placed
    in sys.path after all packages inside the virtualenv.  This
    maintains the "least surprise" result that packages in the
    virtualenv always mask global packages, never the other way
    around.
    
    """
    # Advance the egg-insertion index past every sys.path entry inside
    # this virtualenv; sys.__egginsert is presumably consumed by
    # setuptools' egg insertion logic -- verify against setuptools.
    egginsert = getattr(sys, '__egginsert', 0)
    for i, path in enumerate(sys.path):
        if i > egginsert and path.startswith(sys.prefix):
            egginsert = i
    sys.__egginsert = egginsert + 1
+    
def virtual_addsitepackages(known_paths):
    # Add the *real* prefix's site-packages after the virtualenv's own
    # entries, so local packages always shadow global ones.
    force_global_eggs_after_local_site_packages()
    return addsitepackages(known_paths, sys_prefix=sys.real_prefix)
+
def fixclasspath():
    """Adjust the special classpath sys.path entries for Jython. These
    entries should follow the base virtualenv lib directories.
    """
    def _is_classpath(entry):
        # Detect Jython's magic classpath markers.
        return entry == '__classpath__' or entry.startswith('__pyclasspath__')

    ordinary = [entry for entry in sys.path if not _is_classpath(entry)]
    magic = [entry for entry in sys.path if _is_classpath(entry)]
    # Same entries, but with the classpath markers moved to the end.
    sys.path = ordinary + magic
+
def execusercustomize():
    """Run custom user specific code, if available."""
    try:
        import usercustomize
    except ImportError:
        # No usercustomize module installed; that's fine.
        pass
+
+
def main():
    """Virtualenv-aware site initialisation: wire in the real prefix,
    deduplicate sys.path, add (user) site-packages, and install the
    interactive built-ins (quit, help, copyright, license, ...)."""
    global ENABLE_USER_SITE
    virtual_install_main_packages()
    abs__file__()
    paths_in_sys = removeduppaths()
    if (os.name == "posix" and sys.path and
        os.path.basename(sys.path[-1]) == "Modules"):
        # Running from a CPython build tree.
        addbuilddir()
    if _is_jython:
        fixclasspath()
    # virtualenv writes no-global-site-packages.txt when the env was
    # created without access to the global site-packages.
    GLOBAL_SITE_PACKAGES = not os.path.exists(os.path.join(os.path.dirname(__file__), 'no-global-site-packages.txt'))
    if not GLOBAL_SITE_PACKAGES:
        ENABLE_USER_SITE = False
    if ENABLE_USER_SITE is None:
        ENABLE_USER_SITE = check_enableusersite()
    paths_in_sys = addsitepackages(paths_in_sys)
    paths_in_sys = addusersitepackages(paths_in_sys)
    if GLOBAL_SITE_PACKAGES:
        paths_in_sys = virtual_addsitepackages(paths_in_sys)
    if sys.platform == 'os2emx':
        setBEGINLIBPATH()
    setquit()
    setcopyright()
    sethelper()
    aliasmbcs()
    setencoding()
    execsitecustomize()
    if ENABLE_USER_SITE:
        execusercustomize()
    # Remove sys.setdefaultencoding() so that users cannot change the
    # encoding after initialization.  The test for presence is needed when
    # this module is run as a script, because this code is executed twice.
    if hasattr(sys, "setdefaultencoding"):
        del sys.setdefaultencoding

# site.py performs its setup at import time.
main()
+
+def _script():
+    help = """\
+    %s [--user-base] [--user-site]
+
+    Without arguments print some useful information
+    With arguments print the value of USER_BASE and/or USER_SITE separated
+    by '%s'.
+
+    Exit codes with --user-base or --user-site:
+      0 - user site directory is enabled
+      1 - user site directory is disabled by user
+      2 - uses site directory is disabled by super user
+          or for security reasons
+     >2 - unknown error
+    """
+    args = sys.argv[1:]
+    if not args:
+        print "sys.path = ["
+        for dir in sys.path:
+            print "    %r," % (dir,)
+        print "]"
+        def exists(path):
+            if os.path.isdir(path):
+                return "exists"
+            else:
+                return "doesn't exist"
+        print "USER_BASE: %r (%s)" % (USER_BASE, exists(USER_BASE))
+        print "USER_SITE: %r (%s)" % (USER_SITE, exists(USER_BASE))
+        print "ENABLE_USER_SITE: %r" %  ENABLE_USER_SITE
+        sys.exit(0)
+
+    buffer = []
+    if '--user-base' in args:
+        buffer.append(USER_BASE)
+    if '--user-site' in args:
+        buffer.append(USER_SITE)
+
+    if buffer:
+        print os.pathsep.join(buffer)
+        if ENABLE_USER_SITE:
+            sys.exit(0)
+        elif ENABLE_USER_SITE is False:
+            sys.exit(1)
+        elif ENABLE_USER_SITE is None:
+            sys.exit(2)
+        else:
+            sys.exit(3)
+    else:
+        import textwrap
+        print textwrap.dedent(help % (sys.argv[0], os.pathsep))
+        sys.exit(10)
+
# Allow "python site.py [--user-base] [--user-site]" for diagnostics.
if __name__ == '__main__':
    _script()
Binary file SEESenv/lib/python2.6/site.pyc has changed
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/SEESenv/lib/python2.6/sre.py	Sat Feb 13 12:29:22 2010 +0530
@@ -0,0 +1,1 @@
+/usr/lib/python2.6/sre.py
\ No newline at end of file
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/SEESenv/lib/python2.6/sre.pyc	Sat Feb 13 12:29:22 2010 +0530
@@ -0,0 +1,1 @@
+/usr/lib/python2.6/sre.pyc
\ No newline at end of file
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/SEESenv/lib/python2.6/sre_compile.py	Sat Feb 13 12:29:22 2010 +0530
@@ -0,0 +1,1 @@
+/usr/lib/python2.6/sre_compile.py
\ No newline at end of file
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/SEESenv/lib/python2.6/sre_compile.pyc	Sat Feb 13 12:29:22 2010 +0530
@@ -0,0 +1,1 @@
+/usr/lib/python2.6/sre_compile.pyc
\ No newline at end of file
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/SEESenv/lib/python2.6/sre_constants.py	Sat Feb 13 12:29:22 2010 +0530
@@ -0,0 +1,1 @@
+/usr/lib/python2.6/sre_constants.py
\ No newline at end of file
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/SEESenv/lib/python2.6/sre_constants.pyc	Sat Feb 13 12:29:22 2010 +0530
@@ -0,0 +1,1 @@
+/usr/lib/python2.6/sre_constants.pyc
\ No newline at end of file
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/SEESenv/lib/python2.6/sre_parse.py	Sat Feb 13 12:29:22 2010 +0530
@@ -0,0 +1,1 @@
+/usr/lib/python2.6/sre_parse.py
\ No newline at end of file
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/SEESenv/lib/python2.6/sre_parse.pyc	Sat Feb 13 12:29:22 2010 +0530
@@ -0,0 +1,1 @@
+/usr/lib/python2.6/sre_parse.pyc
\ No newline at end of file
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/SEESenv/lib/python2.6/stat.py	Sat Feb 13 12:29:22 2010 +0530
@@ -0,0 +1,1 @@
+/usr/lib/python2.6/stat.py
\ No newline at end of file
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/SEESenv/lib/python2.6/stat.pyc	Sat Feb 13 12:29:22 2010 +0530
@@ -0,0 +1,1 @@
+/usr/lib/python2.6/stat.pyc
\ No newline at end of file
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/SEESenv/lib/python2.6/types.py	Sat Feb 13 12:29:22 2010 +0530
@@ -0,0 +1,1 @@
+/usr/lib/python2.6/types.py
\ No newline at end of file
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/SEESenv/lib/python2.6/types.pyc	Sat Feb 13 12:29:22 2010 +0530
@@ -0,0 +1,1 @@
+/usr/lib/python2.6/types.pyc
\ No newline at end of file
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/SEESenv/lib/python2.6/warnings.py	Sat Feb 13 12:29:22 2010 +0530
@@ -0,0 +1,1 @@
+/usr/lib/python2.6/warnings.py
\ No newline at end of file
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/SEESenv/lib/python2.6/warnings.pyc	Sat Feb 13 12:29:22 2010 +0530
@@ -0,0 +1,1 @@
+/usr/lib/python2.6/warnings.pyc
\ No newline at end of file
--- a/SEESenv/web/hgbook/comments/views.py	Fri Feb 12 01:11:21 2010 +0530
+++ b/SEESenv/web/hgbook/comments/views.py	Sat Feb 13 12:29:22 2010 +0530
@@ -10,7 +10,7 @@
 from p_list import *
 from BeautifulSoup import BeautifulSoup
 import glob
-html_folder='/home/amit/SEES-hacks/web/html/'
+html_folder='/home/hg/repos/SEES-hacks/SEESenv/web/html/'
 
 def sort_dict(dict):
 	new_dict = {}
@@ -49,9 +49,9 @@
 
 def index(request):
 	html_files = glob.glob(html_folder+'ch*.html')
-	print >> sys.stderr ,html_files	
-	print >> sys.stderr ,"just checking whether i got here"	
-	print >> sys.stderr , html_files	
+#	print >> sys.stderr ,html_files	
+#	print >> sys.stderr ,"just checking whether i got here"	
+#	print >> sys.stderr , html_files	
 	html_files.sort()
 	link_list=['/review/html/'+a.split('/')[-1] for a in html_files]
 #	print >> sys.stderr , html_files        
--- a/SEESenv/web/hgbook/comments/views.py~	Fri Feb 12 01:11:21 2010 +0530
+++ b/SEESenv/web/hgbook/comments/views.py~	Sat Feb 13 12:29:22 2010 +0530
@@ -10,7 +10,7 @@
 from p_list import *
 from BeautifulSoup import BeautifulSoup
 import glob
-html_folder='/home/amit/SEES-hacks/web/html'
+html_folder='/home/hg/repos/SEES-hacks/SEESenv/web/html/'
 
 def sort_dict(dict):
 	new_dict = {}
--- a/SEESenv/web/hgbook/run.wsgi	Fri Feb 12 01:11:21 2010 +0530
+++ b/SEESenv/web/hgbook/run.wsgi	Sat Feb 13 12:29:22 2010 +0530
@@ -1,8 +1,28 @@
+ALLDIRS = ['/home/hg/repos/SEES-hacks/SEESenv/lib/python2.6/site-packages']
+
+import sys 
+import site 
+
+# Remember original sys.path.
+prev_sys_path = list(sys.path) 
+
+# Add each new site-packages directory.
+for directory in ALLDIRS:
+  site.addsitedir(directory)
+
+# Reorder sys.path so new directories at the front.
+new_sys_path = [] 
+for item in list(sys.path): 
+    if item not in prev_sys_path: 
+        new_sys_path.append(item) 
+        sys.path.remove(item) 
+sys.path[:0] = new_sys_path 
+
 import os
 import sys
 import django
-sys.path.append('/'.join(os.getcwd().split('/')[:-1])')
-sys.path.append(os.path.join(os.getcwd(),'comments'))
+sys.path.append('/home/hg/repos/SEES-hacks/SEESenv/web/')
+sys.path.append('/home/hg/repos/SEES-hacks/SEESenv/web/hgbook/comments')
 os.environ['DJANGO_SETTINGS_MODULE'] = 'hgbook.settings'
 import django.core.handlers.wsgi
 application = django.core.handlers.wsgi.WSGIHandler()
--- a/SEESenv/web/hgbook/run.wsgi~	Fri Feb 12 01:11:21 2010 +0530
+++ b/SEESenv/web/hgbook/run.wsgi~	Sat Feb 13 12:29:22 2010 +0530
@@ -1,8 +1,28 @@
+ALLDIRS = ['/home/amit/SEES-hacks/SEESenv/lib/python2.6/site-packages']
+
+import sys 
+import site 
+
+# Remember original sys.path.
+prev_sys_path = list(sys.path) 
+
+# Add each new site-packages directory.
+for directory in ALLDIRS:
+  site.addsitedir(directory)
+
+# Reorder sys.path so new directories at the front.
+new_sys_path = [] 
+for item in list(sys.path): 
+    if item not in prev_sys_path: 
+        new_sys_path.append(item) 
+        sys.path.remove(item) 
+sys.path[:0] = new_sys_path 
+
 import os
 import sys
 import django
-sys.path.append('/'.join(os.getcwd().split('/')[:-1])')
-sys.path.append(os.join(os.getcwd().split('/'),comments))
+sys.path.append('/home/amit/SEES-hacks/SEESenv/web/')
+sys.path.append('/home/amit/SEES-hacks/SEESenv/web/hgbook/comments')
 os.environ['DJANGO_SETTINGS_MODULE'] = 'hgbook.settings'
 import django.core.handlers.wsgi
 application = django.core.handlers.wsgi.WSGIHandler()