commit 988c2c6d400b3721464c2891891807d504b076b0 (patch)
tree   013dc3d676edad701d0d3bbd239bc2df0cdf76b4
parent 60a1fc6cc4cef0b9eed58ea4f0ca003b76ec382a (diff)
parent 183c4d9cefa95f46c3fa3a6936f837542426eac2 (diff)
author    Nicolás Reynolds <fauno@kiwwwi.com.ar>  2011-12-04 23:07:59 -0300
committer Nicolás Reynolds <fauno@kiwwwi.com.ar>  2011-12-04 23:07:59 -0300

    Merge branch 'master' of ssh://gparabola/parabolaweb

    Conflicts:
        local_settings.py.example
        media/archweb.css
        packages/templatetags/package_extras.py
        public/views.py
        templates/packages/details.html
        templates/packages/flag.html
        templates/packages/flag_confirmed.html
        templates/packages/flagged.html
        templates/packages/search.html
        templates/public/download.html
        templates/todolists/view.html
-rw-r--r--  README | 85
-rw-r--r--  TODO | 4
-rw-r--r--  bin/activate | 76
-rw-r--r--  bin/activate.csh | 32
-rw-r--r--  bin/activate.fish | 79
-rw-r--r--  bin/activate_this.py | 32
-rwxr-xr-x  bin/easy_install | 10
-rwxr-xr-x  bin/easy_install-2.7 | 10
-rwxr-xr-x  bin/pip | 10
-rwxr-xr-x  bin/pip-2.7 | 10
-rwxr-xr-x  bin/python | bin 3120 -> 0 bytes
-rwxr-xr-x  bin/python2 | bin 3120 -> 0 bytes
-rw-r--r--  devel/admin.py | 12
-rw-r--r--  devel/management/commands/generate_keyring.py | 86
-rw-r--r--  devel/management/commands/rematch_packager.py | 2
-rw-r--r--  devel/management/commands/reporead.py | 286
-rwxr-xr-x  devel/management/commands/reporead_inotify.py | 211
-rw-r--r--  devel/migrations/0001_initial.py | 18
-rw-r--r--  devel/migrations/0002_auto__add_masterkey.py | 76
-rw-r--r--  devel/migrations/__init__.py | 0
-rw-r--r--  devel/models.py | 20
-rw-r--r--  devel/utils.py | 16
-rw-r--r--  devel/views.py | 22
l---------  lib/python2.7/UserDict.py | 1
l---------  lib/python2.7/_abcoll.py | 1
l---------  lib/python2.7/_weakrefset.py | 1
l---------  lib/python2.7/abc.py | 1
l---------  lib/python2.7/codecs.py | 1
l---------  lib/python2.7/config | 1
l---------  lib/python2.7/copy_reg.py | 1
-rw-r--r--  lib/python2.7/distutils/__init__.py | 91
-rw-r--r--  lib/python2.7/distutils/distutils.cfg | 6
l---------  lib/python2.7/encodings | 1
l---------  lib/python2.7/fnmatch.py | 1
l---------  lib/python2.7/genericpath.py | 1
l---------  lib/python2.7/lib-dynload | 1
l---------  lib/python2.7/linecache.py | 1
l---------  lib/python2.7/locale.py | 1
l---------  lib/python2.7/ntpath.py | 1
-rw-r--r--  lib/python2.7/orig-prefix.txt | 1
l---------  lib/python2.7/os.py | 1
l---------  lib/python2.7/posixpath.py | 1
l---------  lib/python2.7/re.py | 1
-rw-r--r--  lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/EGG-INFO/PKG-INFO | 531
-rw-r--r--  lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/EGG-INFO/SOURCES.txt | 84
-rw-r--r--  lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/EGG-INFO/dependency_links.txt | 1
-rw-r--r--  lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/EGG-INFO/entry_points.txt | 61
-rw-r--r--  lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/EGG-INFO/top_level.txt | 4
-rw-r--r--  lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/EGG-INFO/zip-safe | 1
-rwxr-xr-x  lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/easy_install.py | 5
-rwxr-xr-x  lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/pkg_resources.py | 2693
-rwxr-xr-x  lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/__init__.py | 104
-rwxr-xr-x  lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/archive_util.py | 208
-rw-r--r--  lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/cli.exe | bin 7168 -> 0 bytes
-rwxr-xr-x  lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/__init__.py | 22
-rwxr-xr-x  lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/alias.py | 82
-rwxr-xr-x  lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/bdist_egg.py | 540
-rwxr-xr-x  lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/bdist_rpm.py | 82
-rwxr-xr-x  lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/bdist_wininst.py | 41
-rwxr-xr-x  lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/build_ext.py | 294
-rwxr-xr-x  lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/build_py.py | 268
-rwxr-xr-x  lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/develop.py | 141
-rwxr-xr-x  lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/easy_install.py | 1865
-rwxr-xr-x  lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/egg_info.py | 457
-rwxr-xr-x  lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/install.py | 124
-rwxr-xr-x  lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/install_egg_info.py | 123
-rwxr-xr-x  lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/install_lib.py | 82
-rwxr-xr-x  lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/install_scripts.py | 53
-rwxr-xr-x  lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/register.py | 10
-rwxr-xr-x  lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/rotate.py | 82
-rwxr-xr-x  lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/saveopts.py | 25
-rwxr-xr-x  lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/sdist.py | 252
-rwxr-xr-x  lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/setopt.py | 164
-rwxr-xr-x  lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/test.py | 180
-rwxr-xr-x  lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/upload.py | 183
-rwxr-xr-x  lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/upload_docs.py | 178
-rwxr-xr-x  lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/depends.py | 246
-rwxr-xr-x  lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/dist.py | 816
-rwxr-xr-x  lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/extension.py | 36
-rw-r--r--  lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/gui.exe | bin 7168 -> 0 bytes
-rwxr-xr-x  lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/package_index.py | 830
-rwxr-xr-x  lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/sandbox.py | 282
-rwxr-xr-x  lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/tests/__init__.py | 370
-rwxr-xr-x  lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/tests/doctest.py | 2679
-rwxr-xr-x  lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/tests/server.py | 48
-rwxr-xr-x  lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/tests/test_build_ext.py | 20
-rwxr-xr-x  lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/tests/test_develop.py | 82
-rwxr-xr-x  lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/tests/test_easy_install.py | 243
-rwxr-xr-x  lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/tests/test_packageindex.py | 112
-rwxr-xr-x  lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/tests/test_resources.py | 565
-rwxr-xr-x  lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/tests/test_sandbox.py | 66
-rwxr-xr-x  lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/tests/test_upload_docs.py | 65
-rwxr-xr-x  lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/site.py | 82
-rw-r--r--  lib/python2.7/site-packages/easy-install.pth | 4
-rw-r--r--  lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/EGG-INFO/PKG-INFO | 348
-rw-r--r--  lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/EGG-INFO/SOURCES.txt | 57
-rw-r--r--  lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/EGG-INFO/dependency_links.txt | 1
-rw-r--r--  lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/EGG-INFO/entry_points.txt | 4
-rw-r--r--  lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/EGG-INFO/not-zip-safe | 1
-rw-r--r--  lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/EGG-INFO/top_level.txt | 1
-rwxr-xr-x  lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/__init__.py | 261
-rwxr-xr-x  lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/_pkgutil.py | 589
-rwxr-xr-x  lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/backwardcompat.py | 55
-rwxr-xr-x  lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/basecommand.py | 203
-rwxr-xr-x  lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/baseparser.py | 231
-rwxr-xr-x  lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/commands/__init__.py | 1
-rwxr-xr-x  lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/commands/bundle.py | 33
-rwxr-xr-x  lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/commands/completion.py | 60
-rwxr-xr-x  lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/commands/freeze.py | 109
-rwxr-xr-x  lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/commands/help.py | 32
-rwxr-xr-x  lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/commands/install.py | 247
-rwxr-xr-x  lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/commands/search.py | 116
-rwxr-xr-x  lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/commands/uninstall.py | 42
-rwxr-xr-x  lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/commands/unzip.py | 9
-rwxr-xr-x  lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/commands/zip.py | 346
-rwxr-xr-x  lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/download.py | 470
-rwxr-xr-x  lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/exceptions.py | 17
-rwxr-xr-x  lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/index.py | 686
-rwxr-xr-x  lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/locations.py | 45
-rwxr-xr-x  lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/log.py | 181
-rwxr-xr-x  lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/req.py | 1432
-rwxr-xr-x  lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/runner.py | 18
-rwxr-xr-x  lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/util.py | 479
-rwxr-xr-x  lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/vcs/__init__.py | 238
-rwxr-xr-x  lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/vcs/bazaar.py | 138
-rwxr-xr-x  lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/vcs/git.py | 204
-rwxr-xr-x  lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/vcs/mercurial.py | 162
-rwxr-xr-x  lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/vcs/subversion.py | 260
-rwxr-xr-x  lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/venv.py | 53
-rw-r--r--  lib/python2.7/site-packages/setuptools-0.6c11-py2.7.egg-info | 9
-rw-r--r--  lib/python2.7/site-packages/setuptools.pth | 1
-rw-r--r--  lib/python2.7/site.py | 713
l---------  lib/python2.7/sre.py | 1
l---------  lib/python2.7/sre_compile.py | 1
l---------  lib/python2.7/sre_constants.py | 1
l---------  lib/python2.7/sre_parse.py | 1
l---------  lib/python2.7/stat.py | 1
l---------  lib/python2.7/types.py | 1
l---------  lib/python2.7/warnings.py | 1
-rw-r--r--  local_settings.py.example | 47
-rw-r--r--  main/admin.py | 5
-rw-r--r--  main/fields.py | 42
-rw-r--r--  main/fixtures/arches.json | 8
-rw-r--r--  main/fixtures/groups.json | 22
-rw-r--r--  main/middleware.py | 2
-rw-r--r--  main/migrations/0054_auto__add_field_donor_created.py | 157
-rw-r--r--  main/migrations/0055_unique_package_in_repo.py | 155
-rw-r--r--  main/migrations/0056_auto__chg_field_package_pkgdesc.py | 153
-rw-r--r--  main/migrations/0057_auto__add_field_userprofile_latin_name.py | 153
-rw-r--r--  main/models.py | 118
-rw-r--r--  main/templatetags/cdn.py | 7
-rw-r--r--  main/templatetags/pgp.py | 20
-rw-r--r--  media/CP_EN_BK_S_001.gif | bin 0 -> 3036 bytes
l---------  media/admin_media | 2
-rw-r--r--  media/archnavbar/archnavbar.css | 9
-rw-r--r--  media/archweb.css | 106
-rw-r--r--  media/archweb.js | 193
-rw-r--r--  media/d3.js | 4148
-rw-r--r--  media/d3.layout.js | 1890
-rw-r--r--  media/d3.layout.min.js | 1
-rw-r--r--  media/d3.min.js | 2
-rw-r--r--  media/devs/silhouette.png (renamed from media/silhouette.png) | bin 33090 -> 33090 bytes
-rw-r--r--  media/donate.gif | bin 2951 -> 0 bytes
-rw-r--r--  media/logos/apple-touch-icon-114x114.png | bin 0 -> 3240 bytes
-rw-r--r--  media/logos/apple-touch-icon-57x57.png | bin 0 -> 1638 bytes
-rw-r--r--  media/logos/apple-touch-icon-72x72.png | bin 0 -> 2076 bytes
-rw-r--r--  media/logos/archlinux-logo-only.svg | 45
-rw-r--r--  media/visualize.js | 129
-rw-r--r--  mirrors/admin.py | 5
-rw-r--r--  mirrors/utils.py | 3
-rw-r--r--  mirrors/views.py | 2
-rw-r--r--  news/views.py | 2
-rw-r--r--  packages/management/__init__.py | 0
-rw-r--r--  packages/management/commands/__init__.py | 0
-rw-r--r--  packages/management/commands/populate_signoffs.py | 88
-rw-r--r--  packages/management/commands/signoff_report.py | 125
-rw-r--r--  packages/migrations/0010_auto__add_signoffspecification.py | 183
-rw-r--r--  packages/migrations/0011_auto__chg_field_signoffspecification_user.py | 165
-rw-r--r--  packages/models.py | 104
-rw-r--r--  packages/templatetags/package_extras.py | 43
-rw-r--r--  packages/urls.py | 3
-rw-r--r--  packages/utils.py | 237
-rw-r--r--  packages/views.py | 608
-rw-r--r--  packages/views/__init__.py | 269
-rw-r--r--  packages/views/flag.py | 121
-rw-r--r--  packages/views/search.py | 168
-rw-r--r--  packages/views/signoff.py | 193
-rw-r--r--  public/utils.py | 2
-rw-r--r--  public/views.py | 38
-rw-r--r--  releng/views.py | 2
-rw-r--r--  requirements.txt | 3
-rw-r--r--  requirements_prod.txt | 3
-rw-r--r--  settings.py | 5
-rw-r--r--  sitemaps.py | 39
-rw-r--r--  templates/base.html | 12
-rw-r--r--  templates/devel/clock.html | 2
-rw-r--r--  templates/devel/index.html | 62
-rw-r--r--  templates/devel/profile.html | 2
-rw-r--r--  templates/packages/details.html | 9
-rw-r--r--  templates/packages/differences.html | 31
-rw-r--r--  templates/packages/files.html | 2
-rw-r--r--  templates/packages/flag.html | 11
-rw-r--r--  templates/packages/flag_confirmed.html | 6
-rw-r--r--  templates/packages/flagged.html | 5
-rw-r--r--  templates/packages/flaghelp.html | 5
-rw-r--r--  templates/packages/search.html | 22
-rw-r--r--  templates/packages/signoff_cell.html | 25
-rw-r--r--  templates/packages/signoff_options.html | 18
-rw-r--r--  templates/packages/signoff_report.txt | 41
-rw-r--r--  templates/packages/signoffs.html | 86
-rw-r--r--  templates/public/about.html | 3
-rw-r--r--  templates/public/art.html | 2
-rw-r--r--  templates/public/developer_list.html | 4
-rw-r--r--  templates/public/donate.html | 2
-rw-r--r--  templates/public/download.html | 49
-rw-r--r--  templates/public/feeds.html | 4
-rw-r--r--  templates/public/https.html | 5
-rw-r--r--  templates/public/index.html | 42
-rw-r--r--  templates/public/keys.html | 57
-rw-r--r--  templates/public/svn.html | 2
-rw-r--r--  templates/public/userlist.html | 1
-rw-r--r--  templates/releng/add.html | 22
-rw-r--r--  templates/todolists/email_notification.txt | 9
-rw-r--r--  templates/todolists/view.html | 4
-rw-r--r--  templates/visualize/index.html | 43
-rw-r--r--  todolists/views.py | 10
-rw-r--r--  urls.py | 22
-rw-r--r--  visualize/__init__.py | 0
-rw-r--r--  visualize/models.py | 0
-rw-r--r--  visualize/tests.py | 0
-rw-r--r--  visualize/urls.py | 9
-rw-r--r--  visualize/views.py | 69
232 files changed, 10136 insertions(+), 24944 deletions(-)
diff --git a/README b/README
index 0d3ee082..9cf699b3 100644
--- a/README
+++ b/README
@@ -1,4 +1,4 @@
-# Archweb README
+# Parabolaweb README
To get a pretty version of this document, run
@@ -19,55 +19,84 @@ See AUTHORS file.
# Python dependencies
-More detail in `requirements.txt` and `requirements_prod.txt`; it is best to
-use virtualenv and pip to handle these. But if you insist on (Arch Linux)
-packages, you will probably want the following:
-
-- mysql-python or python-pysqlite
-- django
-- python-markdown
-- python-south
-- python-memcached
-
+We use pip to handle Python dependencies; this is covered in step 4
+below.
+
# Testing Installation
+Throughout this document, we assume that parabolaweb is installed in
+a directory called `parabolaweb`. That need not be the case; on the
+main server, for example, it lives in `/srv/http/web`. Wherever this
+path appears in a command, substitute the correct path for your
+install.
+
1. Run `virtualenv2`.
- $ cd /path/to/archweb && virtualenv2 ../archweb-env
+ $ cd /path/to/parabolaweb && virtualenv2 `pwd`-env
+
+ Here we used `pwd` to choose the environment directory. Any path
+ will do, but it should not be the same directory as the install
+ itself.
2. Activate the virtualenv.
- $ source ../archweb-env/bin/activate
+ $ source `pwd`-env/bin/activate
+
+3. Fix the symlink to the environment
+
+ (parabolaweb-env) $ ln -sf ../../parabolaweb-env/lib/python2.7/site-packages/django/contrib/admin/media media/admin_media
-2. Install dependencies through `pip`.
+ Of course, change `../../parabolaweb-env` to the relative path to
+ your environment. Keep in mind that the path is resolved relative to
+ the `media/` directory, not to the current directory.
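+
+ To sanity-check the link (a quick optional step; the path assumes
+ the layout above):
+
+ (parabolaweb-env) $ readlink -f media/admin_media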
- (archweb-env) $ pip install -r requirements.txt
+4. Install dependencies through `pip`.
-3. Copy `local_settings.py.example` to `local_settings.py` and modify.
+ To install the base dependencies, run
+ (parabolaweb-env) $ pip install -r requirements.txt
+
+ After that you will need to install a database adapter for Python:
+ `MySQL-python==1.2.3` for MySQL, `pysqlite` for SQLite, or
+ `psycopg2` for PostgreSQL. E.g.:
+ (parabolaweb-env) $ pip install MySQL-python==1.2.3
+
+ You may also want to install the memcached client:
+ (parabolaweb-env) $ pip install python-memcached==1.47
+
+ Alternatively, to have the MySQL adapter and memcached client
+ installed automatically, run
+ (parabolaweb-env) $ pip install -r requirements_prod.txt
+
+5. Copy `local_settings.py.example` to `local_settings.py` and modify.
Make sure to uncomment the appropriate db section (either sqlite or mysql).
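+
+ For example (use whatever editor you prefer):
+
+ (parabolaweb-env) $ cp local_settings.py.example local_settings.py
+ (parabolaweb-env) $ $EDITOR local_settings.py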
-4. Sync the database to create it.
+6. Sync the database to create it.
- (archweb-env) $ ./manage.py syncdb
+ (parabolaweb-env) $ ./manage.py syncdb
-5. Migrate changes.
+7. Migrate changes.
- (archweb-env) $ ./manage.py migrate
+ (parabolaweb-env) $ ./manage.py migrate
-6. Load the fixtures to prepopulate some data. If you don't want some of the
+8. Load the fixtures to prepopulate some data. If you don't want some of the
provided data, adjust the file glob accordingly.
- (archweb-env) $ ./manage.py loaddata */fixtures/*.json
+ (parabolaweb-env) $ for file in */fixtures/*.json; do \
+ ./manage.py loaddata $file; \
+ done
+
+ We use a loop instead of a single call with a glob because
+ groups.json is expected to fail, and the loop keeps that failure
+ from preventing the other fixtures from loading.
-7. Use the following commands to start a service instance
+9. Use the following commands to start a service instance
- (archweb-env) $ ./manage.py runserver
+ (parabolaweb-env) $ ./manage.py runserver
-8. To optionally populate the database with real data:
+10. To optionally populate the database with real data:
- (archweb-env) $ wget ftp://ftp.archlinux.org/core/os/i686/core.db.tar.gz
- (archweb-env) $ ./manage.py reporead i686 core.db.tar.gz
- (archweb-env) $ ./manage.py syncisos
+ $ wget https://repo.parabolagnulinux.org/core/os/i686/core.db.tar.gz
+ $ ./manage.py reporead i686 core.db.tar.gz
+ $ ./manage.py syncisos
Alter architecture and repo to get x86\_64 and packages from other repos if
needed.
diff --git a/TODO b/TODO
new file mode 100644
index 00000000..608d8470
--- /dev/null
+++ b/TODO
@@ -0,0 +1,4 @@
+TODO:
+ - refactor stats by templates in dashboard, maybe a templatetag
+
+
diff --git a/bin/activate b/bin/activate
deleted file mode 100644
index 796cc838..00000000
--- a/bin/activate
+++ /dev/null
@@ -1,76 +0,0 @@
-# This file must be used with "source bin/activate" *from bash*
-# you cannot run it directly
-
-deactivate () {
- # reset old environment variables
- if [ -n "$_OLD_VIRTUAL_PATH" ] ; then
- PATH="$_OLD_VIRTUAL_PATH"
- export PATH
- unset _OLD_VIRTUAL_PATH
- fi
- if [ -n "$_OLD_VIRTUAL_PYTHONHOME" ] ; then
- PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
- export PYTHONHOME
- unset _OLD_VIRTUAL_PYTHONHOME
- fi
-
- # This should detect bash and zsh, which have a hash command that must
- # be called to get it to forget past commands. Without forgetting
- # past commands the $PATH changes we made may not be respected
- if [ -n "$BASH" -o -n "$ZSH_VERSION" ] ; then
- hash -r
- fi
-
- if [ -n "$_OLD_VIRTUAL_PS1" ] ; then
- PS1="$_OLD_VIRTUAL_PS1"
- export PS1
- unset _OLD_VIRTUAL_PS1
- fi
-
- unset VIRTUAL_ENV
- if [ ! "$1" = "nondestructive" ] ; then
- # Self destruct!
- unset -f deactivate
- fi
-}
-
-# unset irrelavent variables
-deactivate nondestructive
-
-VIRTUAL_ENV="/srv/http/web"
-export VIRTUAL_ENV
-
-_OLD_VIRTUAL_PATH="$PATH"
-PATH="$VIRTUAL_ENV/bin:$PATH"
-export PATH
-
-# unset PYTHONHOME if set
-# this will fail if PYTHONHOME is set to the empty string (which is bad anyway)
-# could use `if (set -u; : $PYTHONHOME) ;` in bash
-if [ -n "$PYTHONHOME" ] ; then
- _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
- unset PYTHONHOME
-fi
-
-if [ -z "$VIRTUAL_ENV_DISABLE_PROMPT" ] ; then
- _OLD_VIRTUAL_PS1="$PS1"
- if [ "x" != x ] ; then
- PS1="$PS1"
- else
- if [ "`basename \"$VIRTUAL_ENV\"`" = "__" ] ; then
- # special case for Aspen magic directories
- # see http://www.zetadev.com/software/aspen/
- PS1="[`basename \`dirname \"$VIRTUAL_ENV\"\``] $PS1"
- else
- PS1="(`basename \"$VIRTUAL_ENV\"`)$PS1"
- fi
- fi
- export PS1
-fi
-
-# This should detect bash and zsh, which have a hash command that must
-# be called to get it to forget past commands. Without forgetting
-# past commands the $PATH changes we made may not be respected
-if [ -n "$BASH" -o -n "$ZSH_VERSION" ] ; then
- hash -r
-fi
diff --git a/bin/activate.csh b/bin/activate.csh
deleted file mode 100644
index 0774525d..00000000
--- a/bin/activate.csh
+++ /dev/null
@@ -1,32 +0,0 @@
-# This file must be used with "source bin/activate.csh" *from csh*.
-# You cannot run it directly.
-# Created by Davide Di Blasi <davidedb@gmail.com>.
-
-alias deactivate 'test $?_OLD_VIRTUAL_PATH != 0 && setenv PATH "$_OLD_VIRTUAL_PATH" && unset _OLD_VIRTUAL_PATH; rehash; test $?_OLD_VIRTUAL_PROMPT != 0 && set prompt="$_OLD_VIRTUAL_PROMPT" && unset _OLD_VIRTUAL_PROMPT; unsetenv VIRTUAL_ENV; test "\!:*" != "nondestructive" && unalias deactivate'
-
-# Unset irrelavent variables.
-deactivate nondestructive
-
-setenv VIRTUAL_ENV "/srv/http/web"
-
-set _OLD_VIRTUAL_PATH="$PATH"
-setenv PATH "$VIRTUAL_ENV/bin:$PATH"
-
-set _OLD_VIRTUAL_PROMPT="$prompt"
-
-if ("" != "") then
- set env_name = ""
-else
- if (`basename "$VIRTUAL_ENV"` == "__") then
- # special case for Aspen magic directories
- # see http://www.zetadev.com/software/aspen/
- set env_name = `basename \`dirname "$VIRTUAL_ENV"\``
- else
- set env_name = `basename "$VIRTUAL_ENV"`
- endif
-endif
-set prompt = "[$env_name] $prompt"
-unset env_name
-
-rehash
-
diff --git a/bin/activate.fish b/bin/activate.fish
deleted file mode 100644
index 39c644a7..00000000
--- a/bin/activate.fish
+++ /dev/null
@@ -1,79 +0,0 @@
-# This file must be used with ". bin/activate.fish" *from fish* (http://fishshell.org)
-# you cannot run it directly
-
-function deactivate -d "Exit virtualenv and return to normal shell environment"
- # reset old environment variables
- if test -n "$_OLD_VIRTUAL_PATH"
- set -gx PATH $_OLD_VIRTUAL_PATH
- set -e _OLD_VIRTUAL_PATH
- end
- if test -n "$_OLD_VIRTUAL_PYTHONHOME"
- set -gx PYTHONHOME $_OLD_VIRTUAL_PYTHONHOME
- set -e _OLD_VIRTUAL_PYTHONHOME
- end
-
- if test -n "$_OLD_FISH_PROMPT_OVERRIDE"
- functions -e fish_prompt
- set -e _OLD_FISH_PROMPT_OVERRIDE
- end
-
- set -e VIRTUAL_ENV
- if test "$argv[1]" != "nondestructive"
- # Self destruct!
- functions -e deactivate
- end
-end
-
-# unset irrelavent variables
-deactivate nondestructive
-
-set -gx VIRTUAL_ENV "/srv/http/web"
-
-set -gx _OLD_VIRTUAL_PATH $PATH
-set -gx PATH "$VIRTUAL_ENV/bin" $PATH
-
-# unset PYTHONHOME if set
-if set -q PYTHONHOME
- set -gx _OLD_VIRTUAL_PYTHONHOME $PYTHONHOME
- set -e PYTHONHOME
-end
-
-if test -z "$VIRTUAL_ENV_DISABLE_PROMPT"
- # fish shell uses a function, instead of env vars,
- # to produce the prompt. Overriding the existing function is easy.
- # However, adding to the current prompt, instead of clobbering it,
- # is a little more work.
- set -l oldpromptfile (tempfile)
- if test $status
- # save the current fish_prompt function...
- echo "function _old_fish_prompt" >> $oldpromptfile
- echo -n \# >> $oldpromptfile
- functions fish_prompt >> $oldpromptfile
- # we've made the "_old_fish_prompt" file, source it.
- . $oldpromptfile
- rm -f $oldpromptfile
-
- if test -n ""
- # We've been given us a prompt override.
- #
- # FIXME: Unsure how to handle this *safely*. We could just eval()
- # whatever is given, but the risk is a bit much.
- echo "activate.fish: Alternative prompt prefix is not supported under fish-shell." 1>&2
- echo "activate.fish: Alter the fish_prompt in this file as needed." 1>&2
- end
-
- # with the original prompt function renamed, we can override with our own.
- function fish_prompt
- set -l _checkbase (basename "$VIRTUAL_ENV")
- if test $_checkbase = "__"
- # special case for Aspen magic directories
- # see http://www.zetadev.com/software/aspen/
- printf "%s[%s]%s %s" (set_color -b blue white) (basename (dirname "$VIRTUAL_ENV")) (set_color normal) (_old_fish_prompt)
- else
- printf "%s(%s)%s%s" (set_color -b blue white) (basename "$VIRTUAL_ENV") (set_color normal) (_old_fish_prompt)
- end
- end
- set -gx _OLD_FISH_PROMPT_OVERRIDE "$VIRTUAL_ENV"
- end
-end
-
diff --git a/bin/activate_this.py b/bin/activate_this.py
deleted file mode 100644
index aff6927d..00000000
--- a/bin/activate_this.py
+++ /dev/null
@@ -1,32 +0,0 @@
-"""By using execfile(this_file, dict(__file__=this_file)) you will
-activate this virtualenv environment.
-
-This can be used when you must use an existing Python interpreter, not
-the virtualenv bin/python
-"""
-
-try:
- __file__
-except NameError:
- raise AssertionError(
- "You must run this like execfile('path/to/activate_this.py', dict(__file__='path/to/activate_this.py'))")
-import sys
-import os
-
-base = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
-if sys.platform == 'win32':
- site_packages = os.path.join(base, 'Lib', 'site-packages')
-else:
- site_packages = os.path.join(base, 'lib', 'python%s' % sys.version[:3], 'site-packages')
-prev_sys_path = list(sys.path)
-import site
-site.addsitedir(site_packages)
-sys.real_prefix = sys.prefix
-sys.prefix = base
-# Move the added items to the front of the path:
-new_sys_path = []
-for item in list(sys.path):
- if item not in prev_sys_path:
- new_sys_path.append(item)
- sys.path.remove(item)
-sys.path[:0] = new_sys_path
diff --git a/bin/easy_install b/bin/easy_install
deleted file mode 100755
index eeb31a5c..00000000
--- a/bin/easy_install
+++ /dev/null
@@ -1,10 +0,0 @@
-#!/srv/http/web/bin/python2
-# EASY-INSTALL-ENTRY-SCRIPT: 'distribute==0.6.14','console_scripts','easy_install'
-__requires__ = 'distribute==0.6.14'
-import sys
-from pkg_resources import load_entry_point
-
-if __name__ == '__main__':
- sys.exit(
- load_entry_point('distribute==0.6.14', 'console_scripts', 'easy_install')()
- )
diff --git a/bin/easy_install-2.7 b/bin/easy_install-2.7
deleted file mode 100755
index 09c3bf26..00000000
--- a/bin/easy_install-2.7
+++ /dev/null
@@ -1,10 +0,0 @@
-#!/srv/http/web/bin/python2
-# EASY-INSTALL-ENTRY-SCRIPT: 'distribute==0.6.14','console_scripts','easy_install-2.7'
-__requires__ = 'distribute==0.6.14'
-import sys
-from pkg_resources import load_entry_point
-
-if __name__ == '__main__':
- sys.exit(
- load_entry_point('distribute==0.6.14', 'console_scripts', 'easy_install-2.7')()
- )
diff --git a/bin/pip b/bin/pip
deleted file mode 100755
index 2d851f27..00000000
--- a/bin/pip
+++ /dev/null
@@ -1,10 +0,0 @@
-#!/srv/http/web/bin/python2
-# EASY-INSTALL-ENTRY-SCRIPT: 'pip==0.8.1','console_scripts','pip'
-__requires__ = 'pip==0.8.1'
-import sys
-from pkg_resources import load_entry_point
-
-if __name__ == '__main__':
- sys.exit(
- load_entry_point('pip==0.8.1', 'console_scripts', 'pip')()
- )
diff --git a/bin/pip-2.7 b/bin/pip-2.7
deleted file mode 100755
index 23e5741e..00000000
--- a/bin/pip-2.7
+++ /dev/null
@@ -1,10 +0,0 @@
-#!/srv/http/web/bin/python2
-# EASY-INSTALL-ENTRY-SCRIPT: 'pip==0.8.1','console_scripts','pip-2.7'
-__requires__ = 'pip==0.8.1'
-import sys
-from pkg_resources import load_entry_point
-
-if __name__ == '__main__':
- sys.exit(
- load_entry_point('pip==0.8.1', 'console_scripts', 'pip-2.7')()
- )
diff --git a/bin/python b/bin/python
deleted file mode 100755
index 888b52ee..00000000
--- a/bin/python
+++ /dev/null
Binary files differ
diff --git a/bin/python2 b/bin/python2
deleted file mode 100755
index 888b52ee..00000000
--- a/bin/python2
+++ /dev/null
Binary files differ
diff --git a/devel/admin.py b/devel/admin.py
new file mode 100644
index 00000000..84082fb8
--- /dev/null
+++ b/devel/admin.py
@@ -0,0 +1,12 @@
+from django.contrib import admin
+
+from .models import MasterKey
+
+
+class MasterKeyAdmin(admin.ModelAdmin):
+ list_display = ('pgp_key', 'owner', 'created', 'revoker', 'revoked')
+ search_fields = ('pgp_key', 'owner', 'revoker')
+
+admin.site.register(MasterKey, MasterKeyAdmin)
+
+# vim: set ts=4 sw=4 et:
diff --git a/devel/management/commands/generate_keyring.py b/devel/management/commands/generate_keyring.py
new file mode 100644
index 00000000..062c738b
--- /dev/null
+++ b/devel/management/commands/generate_keyring.py
@@ -0,0 +1,86 @@
+# -*- coding: utf-8 -*-
+"""
+generate_keyring command
+
+Assemble a GPG keyring with all known developer keys.
+
+Usage: ./manage.py generate_keyring <keyserver> <keyring_path>
+"""
+
+from django.core.management.base import BaseCommand, CommandError
+
+import logging
+import subprocess
+import sys
+
+from devel.models import MasterKey
+from main.models import UserProfile
+
+logging.basicConfig(
+ level=logging.INFO,
+ format='%(asctime)s -> %(levelname)s: %(message)s',
+ datefmt='%Y-%m-%d %H:%M:%S',
+ stream=sys.stderr)
+logger = logging.getLogger()
+
+class Command(BaseCommand):
+ args = "<keyserver> <keyring_path> [ownertrust_path]"
+ help = "Assemble a GPG keyring with all known developer keys."
+
+ def handle(self, *args, **options):
+ v = int(options.get('verbosity', None))
+ if v == 0:
+ logger.level = logging.ERROR
+ elif v == 1:
+ logger.level = logging.INFO
+ elif v == 2:
+ logger.level = logging.DEBUG
+
+ if len(args) < 2:
+ raise CommandError("keyserver and keyring_path must be provided")
+
+ generate_keyring(args[0], args[1])
+
+ if len(args) > 2:
+ generate_ownertrust(args[2])
+
+
+def generate_keyring(keyserver, keyring):
+ logger.info("getting all known key IDs")
+
+ # Screw you Django, for not letting one natively do value != <empty string>
+ key_ids = UserProfile.objects.filter(user__is_active=True,
+ pgp_key__isnull=False).extra(where=["pgp_key != ''"]).values_list(
+ "pgp_key", flat=True)
+ logger.info("%d keys fetched from user profiles", len(key_ids))
+ master_key_ids = MasterKey.objects.values_list("pgp_key", flat=True)
+ logger.info("%d keys fetched from master keys", len(master_key_ids))
+
+ gpg_cmd = ["gpg", "--no-default-keyring", "--keyring", keyring,
+ "--keyserver", keyserver, "--recv-keys"]
+ logger.info("running command: %r", gpg_cmd)
+ gpg_cmd.extend(key_ids)
+ gpg_cmd.extend(master_key_ids)
+ subprocess.check_call(gpg_cmd)
+ logger.info("keyring at %s successfully updated", keyring)
+
+
+TRUST_LEVELS = {
+ 'unknown': 0,
+ 'expired': 1,
+ 'undefined': 2,
+ 'never': 3,
+ 'marginal': 4,
+ 'fully': 5,
+ 'ultimate': 6,
+}
+
+
+def generate_ownertrust(trust_path):
+ master_key_ids = MasterKey.objects.values_list("pgp_key", flat=True)
+ with open(trust_path, "w") as trustfile:
+ for key_id in master_key_ids:
+ trustfile.write("%s:%d:\n" % (key_id, TRUST_LEVELS['marginal']))
+ logger.info("trust file at %s created or overwritten", trust_path)
+
+# vim: set ts=4 sw=4 et:
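A usage sketch for the new command (the keyserver and file names here are
illustrative placeholders, not values fixed by the code):

    $ ./manage.py generate_keyring pgp.mit.edu devkeys.gpg ownertrust.txt
    $ gpg --import-ownertrust ownertrust.txt

The second step loads the generated trust list, which marks each master
key as marginally trusted (TRUST_LEVELS['marginal'] above).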
diff --git a/devel/management/commands/rematch_packager.py b/devel/management/commands/rematch_packager.py
index ba6e6a54..461d83ab 100644
--- a/devel/management/commands/rematch_packager.py
+++ b/devel/management/commands/rematch_packager.py
@@ -24,7 +24,7 @@ logging.basicConfig(
logger = logging.getLogger()
class Command(NoArgsCommand):
- help = "Runs a check on all active mirror URLs to determine if they are reachable via IPv4 and/or v6."
+ help = "Match all packages with a packager_str but NULL packager_id to a packager if we can find one."
def handle_noargs(self, **options):
v = int(options.get('verbosity', None))
diff --git a/devel/management/commands/reporead.py b/devel/management/commands/reporead.py
index cf597577..c444538b 100644
--- a/devel/management/commands/reporead.py
+++ b/devel/management/commands/reporead.py
@@ -13,10 +13,6 @@ Example:
./manage.py reporead i686 /tmp/core.db.tar.gz
"""
-from django.core.management.base import BaseCommand, CommandError
-from django.contrib.auth.models import User
-from django.db import transaction
-
from collections import defaultdict
import io
import os
@@ -27,6 +23,11 @@ import logging
from datetime import datetime
from optparse import make_option
+from django.core.management.base import BaseCommand, CommandError
+from django.contrib.auth.models import User
+from django.db import connections, router, transaction
+from django.db.utils import IntegrityError
+
from devel.utils import UserFinder
from main.models import Arch, Package, PackageDepend, PackageFile, Repo
from packages.models import Conflict, Provision, Replacement
@@ -36,6 +37,8 @@ logging.basicConfig(
format='%(asctime)s -> %(levelname)s: %(message)s',
datefmt='%Y-%m-%d %H:%M:%S',
stream=sys.stderr)
+TRACE = 5
+logging.addLevelName(TRACE, 'TRACE')
logger = logging.getLogger()
class Command(BaseCommand):
@@ -51,8 +54,6 @@ class Command(BaseCommand):
def handle(self, arch=None, filename=None, **options):
if not arch:
raise CommandError('Architecture is required.')
- if not validate_arch(arch):
- raise CommandError('Specified architecture %s is not currently known.' % arch)
if not filename:
raise CommandError('Package database file is required.')
filename = os.path.normpath(filename)
@@ -72,8 +73,8 @@ class Command(BaseCommand):
class Pkg(object):
"""An interim 'container' object for holding Arch package data."""
- bare = ( 'name', 'base', 'arch', 'desc', 'filename',
- 'md5sum', 'sha256sum', 'pgpsig', 'url', 'packager' )
+ bare = ( 'name', 'base', 'arch', 'filename',
+ 'md5sum', 'sha256sum', 'url', 'packager' )
number = ( 'csize', 'isize' )
collections = ( 'depends', 'optdepends', 'conflicts',
'provides', 'replaces', 'groups', 'license', 'files' )
@@ -85,6 +86,7 @@ class Pkg(object):
self.ver = None
self.rel = None
self.epoch = 0
+ self.pgpsig = None
for k in self.bare + self.number:
setattr(self, k, None)
for k in self.collections:
@@ -99,6 +101,9 @@ class Pkg(object):
setattr(self, k, v[0][:254])
elif k in self.number:
setattr(self, k, long(v[0]))
+ elif k in ('desc', 'pgpsig'):
+ # do NOT prune these values at all
+ setattr(self, k, v[0])
elif k == 'version':
match = self.version_re.match(v[0])
self.ver = match.group(3)
@@ -185,8 +190,6 @@ def create_multivalued(dbpkg, repopkg, db_attr, repo_attr):
finder = UserFinder()
def populate_pkg(dbpkg, repopkg, force=False, timestamp=None):
- db_score = 1
-
if repopkg.base:
dbpkg.pkgbase = repopkg.base
else:
@@ -210,7 +213,7 @@ def populate_pkg(dbpkg, repopkg, force=False, timestamp=None):
dbpkg.last_update = timestamp
dbpkg.save()
- db_score += populate_files(dbpkg, repopkg, force=force)
+ populate_files(dbpkg, repopkg, force=force)
dbpkg.packagedepend_set.all().delete()
for y in repopkg.depends:
@@ -231,28 +234,23 @@ def populate_pkg(dbpkg, repopkg, force=False, timestamp=None):
create_multivalued(dbpkg, repopkg, 'groups', 'groups')
create_multivalued(dbpkg, repopkg, 'licenses', 'license')
- related_score = (len(repopkg.depends) + len(repopkg.optdepends)
- + len(repopkg.conflicts) + len(repopkg.provides)
- + len(repopkg.replaces) + len(repopkg.groups)
- + len(repopkg.license))
- if related_score:
- db_score += (related_score / 20) + 1
- return db_score
+pkg_same_version = lambda pkg, dbpkg: pkg.ver == dbpkg.pkgver \
+ and pkg.rel == dbpkg.pkgrel and pkg.epoch == dbpkg.epoch
def populate_files(dbpkg, repopkg, force=False):
if not force:
- if dbpkg.pkgver != repopkg.ver or dbpkg.pkgrel != repopkg.rel \
- or dbpkg.epoch != repopkg.epoch:
+ if not pkg_same_version(repopkg, dbpkg):
logger.info("DB version (%s) didn't match repo version "
"(%s) for package %s, skipping file list addition",
dbpkg.full_version, repopkg.full_version, dbpkg.pkgname)
- return 0
+ return
if not dbpkg.files_last_update or not dbpkg.last_update:
pass
elif dbpkg.files_last_update > dbpkg.last_update:
- return 0
+ return
+
# only delete files if we are reading a DB that contains them
if repopkg.has_files:
dbpkg.packagefile_set.all().delete()
@@ -271,125 +269,156 @@ def populate_files(dbpkg, repopkg, force=False):
pkgfile.save(force_insert=True)
dbpkg.files_last_update = datetime.utcnow()
dbpkg.save()
- return (len(repopkg.files) / 50) + 1
- return 0
-class Batcher(object):
- def __init__(self, threshold, start=0):
- self.threshold = threshold
- self.meter = start
+def select_pkg_for_update(dbpkg):
+ database = router.db_for_write(Package, instance=dbpkg)
+ connection = connections[database]
+ if 'sqlite' in connection.settings_dict['ENGINE'].lower():
+ return dbpkg
+ new_pkg = Package.objects.raw(
+ 'SELECT * FROM packages WHERE id = %s FOR UPDATE',
+ [dbpkg.id])
+ return list(new_pkg)[0]
- def batch_commit(self, score):
- """
- Track updates to the database and perform a commit if the batch
- becomes sufficiently large. "Large" is defined by waiting for the
- sum of scores to exceed the arbitrary threshold value; once it is
- hit a commit is issued.
- """
- self.meter += score
- if self.meter > self.threshold:
- logger.debug("Committing transaction, batch threshold hit")
- transaction.commit()
- self.meter = 0
+def update_common(archname, reponame, pkgs, sanity_check=True):
+ with transaction.commit_manually():
+ repository = Repo.objects.get(name__iexact=reponame)
+ architecture = Arch.objects.get(name__iexact=archname)
+ # no-arg order_by() removes even the default ordering; we don't need it
+ dbpkgs = Package.objects.filter(
+ arch=architecture, repo=repository).order_by()
-@transaction.commit_on_success
-def db_update(archname, reponame, pkgs, options):
- """
- Parses a list and updates the Arch dev database accordingly.
+ logger.info("%d packages in current web DB", len(dbpkgs))
+ logger.info("%d packages in new updating DB", len(pkgs))
- Arguments:
- pkgs -- A list of Pkg objects.
+ # Try to catch those random package deletions that make Eric so unhappy.
+ if len(dbpkgs):
+ dbpercent = 100.0 * len(pkgs) / len(dbpkgs)
+ else:
+ dbpercent = 0.0
+ logger.info("DB package ratio: %.1f%%", dbpercent)
+
+ # Fewer than 20 packages makes the percentage check unreliable, but it also
+ # means we expect the repo to fluctuate a lot.
+ msg = "Package database has %.1f%% the number of packages in the " \
+ "web database" % dbpercent
+ if not sanity_check:
+ pass
+ elif repository.testing or repository.staging:
+ pass
+ elif len(dbpkgs) == 0 and len(pkgs) == 0:
+ pass
+ elif len(dbpkgs) > 20 and dbpercent < 50.0:
+ logger.error(msg)
+ raise Exception(msg)
+ elif dbpercent < 75.0:
+ logger.warning(msg)
+
+ # If isolation level is repeatable-read, we need to ensure each package
+ # update starts a new transaction and re-queries the database as necessary
+ # to guard against simultaneous updates
+ transaction.commit()
+ return dbpkgs
+
+def db_update(archname, reponame, pkgs, force=False):
"""
- logger.info('Updating Arch: %s', archname)
- force = options.get('force', False)
- filesonly = options.get('filesonly', False)
+ Parses a list of packages and updates the packages database accordingly.
+ """
+ logger.info('Updating %s (%s)', reponame, archname)
+ dbpkgs = update_common(archname, reponame, pkgs, True)
repository = Repo.objects.get(name__iexact=reponame)
architecture = Arch.objects.get(name__iexact=archname)
- # no-arg order_by() removes even the default ordering; we don't need it
- dbpkgs = Package.objects.filter(
- arch=architecture, repo=repository).order_by()
+
# This makes our inner loop where we find packages by name *way* more
# efficient by not having to go to the database for each package to
# SELECT them by name.
- dbdict = dict([(pkg.pkgname, pkg) for pkg in dbpkgs])
+ dbdict = dict((dbpkg.pkgname, dbpkg) for dbpkg in dbpkgs)
- logger.debug("Creating sets")
dbset = set(dbdict.keys())
syncset = set([pkg.name for pkg in pkgs])
- logger.info("%d packages in current web DB", len(dbset))
- logger.info("%d packages in new updating db", len(syncset))
+
in_sync_not_db = syncset - dbset
logger.info("%d packages in sync not db", len(in_sync_not_db))
-
- # Try to catch those random package deletions that make Eric so unhappy.
- if len(dbset):
- dbpercent = 100.0 * len(syncset) / len(dbset)
- else:
- dbpercent = 0.0
- logger.info("DB package ratio: %.1f%%", dbpercent)
-
- # Fewer than 20 packages makes the percentage check unreliable, but it also
- # means we expect the repo to fluctuate a lot.
- msg = "Package database has %.1f%% the number of packages in the " \
- "web database" % dbpercent
- if len(dbset) == 0 and len(syncset) == 0:
- pass
- elif not filesonly and \
- len(dbset) > 20 and dbpercent < 50.0 and \
- not repository.testing and not repository.staging:
- logger.error(msg)
- raise Exception(msg)
- elif dbpercent < 75.0:
- logger.warning(msg)
-
- batcher = Batcher(100)
-
- if not filesonly:
- # packages in syncdb and not in database (add to database)
- for p in [x for x in pkgs if x.name in in_sync_not_db]:
- logger.info("Adding package %s", p.name)
- pkg = Package(pkgname=p.name, arch=architecture, repo=repository)
- score = populate_pkg(pkg, p, timestamp=datetime.utcnow())
- batcher.batch_commit(score)
-
- # packages in database and not in syncdb (remove from database)
- in_db_not_sync = dbset - syncset
- for p in in_db_not_sync:
- logger.info("Removing package %s", p)
- dbp = dbdict[p]
- dbp.delete()
- batcher.batch_commit(1)
+ # packages in syncdb and not in database (add to database)
+ for pkg in (pkg for pkg in pkgs if pkg.name in in_sync_not_db):
+ logger.info("Adding package %s", pkg.name)
+ dbpkg = Package(pkgname=pkg.name, arch=architecture, repo=repository)
+ try:
+ with transaction.commit_on_success():
+ populate_pkg(dbpkg, pkg, timestamp=datetime.utcnow())
+ except IntegrityError:
+ logger.warning("Could not add package %s; "
+ "not fatal if another thread beat us to it.",
+ pkg.name, exc_info=True)
+
+ # packages in database and not in syncdb (remove from database)
+ for pkgname in (dbset - syncset):
+ logger.info("Removing package %s", pkgname)
+ dbpkg = dbdict[pkgname]
+ with transaction.commit_on_success():
+ # no race condition here as long as simultaneous threads both
+ # issue deletes; second delete will be a no-op
+ dbpkg.delete()
# packages in both database and in syncdb (update in database)
pkg_in_both = syncset & dbset
- for p in [x for x in pkgs if x.name in pkg_in_both]:
- logger.debug("Looking for package updates")
- dbp = dbdict[p.name]
+ for pkg in (x for x in pkgs if x.name in pkg_in_both):
+ logger.debug("Checking package %s", pkg.name)
+ dbpkg = dbdict[pkg.name]
timestamp = None
# for a force, we don't want to update the timestamp.
# for a non-force, we don't want to do anything at all.
- if filesonly:
- pass
- elif p.ver == dbp.pkgver and p.rel == dbp.pkgrel \
- and p.epoch == dbp.epoch:
+ if pkg_same_version(pkg, dbpkg):
if not force:
continue
else:
timestamp = datetime.utcnow()
- if filesonly:
- logger.debug("Checking files for package %s", p.name)
- score = populate_files(dbp, p, force=force)
- else:
- logger.info("Updating package %s", p.name)
- score = populate_pkg(dbp, p, force=force, timestamp=timestamp)
+ # The odd select_for_update song and dance here are to ensure
+ # simultaneous updates don't happen on a package, causing
+ # files/depends/all related items to be double-imported.
+ with transaction.commit_on_success():
+ # TODO Django 1.4 select_for_update() will work once released
+ dbpkg = select_pkg_for_update(dbpkg)
+ if pkg_same_version(pkg, dbpkg):
+ logger.debug("Package %s was already updated", pkg.name)
+ continue
+ logger.info("Updating package %s", pkg.name)
+ populate_pkg(dbpkg, pkg, force=force, timestamp=timestamp)
- batcher.batch_commit(score)
+ logger.info('Finished updating arch: %s', archname)
- logger.info('Finished updating Arch: %s', archname)
+
+def filesonly_update(archname, reponame, pkgs, force=False):
+ """
+ Parses a list of packages and updates the packages database accordingly.
+ """
+ logger.info('Updating files for %s (%s)', reponame, archname)
+ dbpkgs = update_common(archname, reponame, pkgs, False)
+ dbdict = dict((dbpkg.pkgname, dbpkg) for dbpkg in dbpkgs)
+ dbset = set(dbdict.keys())
+
+ for pkg in (pkg for pkg in pkgs if pkg.name in dbset):
+ dbpkg = dbdict[pkg.name]
+
+ # The odd select_for_update song and dance here are to ensure
+ # simultaneous updates don't happen on a package, causing
+ # files to be double-imported.
+ with transaction.commit_on_success():
+ if not dbpkg.files_last_update or not dbpkg.last_update:
+ pass
+ elif dbpkg.files_last_update > dbpkg.last_update:
+ logger.debug("Files for %s are up to date", pkg.name)
+ continue
+ # TODO Django 1.4 select_for_update() will work once released
+ dbpkg = select_pkg_for_update(dbpkg)
+ logger.debug("Checking files for package %s", pkg.name)
+ populate_files(dbpkg, pkg, force=force)
+
+ logger.info('Finished updating arch: %s', archname)
def parse_info(iofile):
@@ -404,7 +433,7 @@ def parse_info(iofile):
continue
elif line.startswith('%') and line.endswith('%'):
blockname = line[1:-1].lower()
- logger.debug("Parsing package block %s", blockname)
+ logger.log(TRACE, "Parsing package block %s", blockname)
store[blockname] = []
elif blockname:
store[blockname].append(line)
@@ -446,49 +475,64 @@ def parse_repo(repopath):
continue
data_file = repodb.extractfile(tarinfo)
data_file = io.TextIOWrapper(io.BytesIO(data_file.read()),
- encoding='utf=8')
+ encoding='UTF-8')
try:
pkgs[pkgid].populate(parse_info(data_file))
except UnicodeDecodeError:
logger.warn("Could not correctly decode %s, skipping file",
tarinfo.name)
data_file.close()
+ del data_file
- logger.debug("Done parsing file %s", fname)
+ logger.debug("Done parsing file %s/%s", pkgid, fname)
repodb.close()
logger.info("Finished repo parsing, %d total packages", len(pkgs))
return (reponame, pkgs.values())
-def validate_arch(archname):
+def locate_arch(arch):
"Check if arch is valid."
- return Arch.objects.filter(name__iexact=archname).exists()
+ if isinstance(arch, Arch):
+ return arch
+ try:
+ return Arch.objects.get(name__iexact=arch)
+ except Arch.DoesNotExist:
+ raise CommandError(
+ 'Specified architecture %s is not currently known.' % arch)
+
def read_repo(primary_arch, repo_file, options):
"""
Parses repo.db.tar.gz file and returns exit status.
"""
+ # always returns an Arch object, regardless of what is passed in
+ primary_arch = locate_arch(primary_arch)
+ force = options.get('force', False)
+ filesonly = options.get('filesonly', False)
+
repo, packages = parse_repo(repo_file)
# group packages by arch -- to handle noarch stuff
packages_arches = {}
for arch in Arch.objects.filter(agnostic=True):
packages_arches[arch.name] = []
- packages_arches[primary_arch] = []
+ packages_arches[primary_arch.name] = []
for package in packages:
if package.arch in packages_arches:
packages_arches[package.arch].append(package)
else:
# we don't include mis-arched packages
- logger.warning("Package %s arch = %s",
- package.name, package.arch)
+ logger.warning("Package %s arch = %s", package.name, package.arch)
del packages
- logger.info('Starting database updates.')
+ logger.info('Starting database updates for %s.', repo_file)
for arch in sorted(packages_arches.keys()):
- db_update(arch, repo, packages_arches[arch], options)
- logger.info('Finished database updates.')
+ if filesonly:
+ filesonly_update(arch, repo, packages_arches[arch], force)
+ else:
+ db_update(arch, repo, packages_arches[arch], force)
+ logger.info('Finished database updates for %s.', repo_file)
return 0
# vim: set ts=4 sw=4 et:
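The split update paths above can be exercised separately — a sketch,
assuming the command's existing --filesonly option and an example
database path:

    $ ./manage.py reporead --filesonly i686 /srv/ftp/core/os/i686/core.files.tar.gz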
diff --git a/devel/management/commands/reporead_inotify.py b/devel/management/commands/reporead_inotify.py
new file mode 100755
index 00000000..c74762eb
--- /dev/null
+++ b/devel/management/commands/reporead_inotify.py
@@ -0,0 +1,211 @@
+# -*- coding: utf-8 -*-
+"""
+reporead_inotify command
+
+Watches repo.files.tar.gz files for updates and parses them after a short delay
+in order to catch all updates in a single bulk update.
+
+Usage: ./manage.py reporead_inotify [path_template]
+
+Where 'path_template' is an optional path_template for finding the
+repo.files.tar.gz files. The form is '/srv/ftp/%(repo)s/os/%(arch)s/', which is
+also the default template if none is specified. While 'repo' is not required to
+be present in the path_template, note that 'arch' is so reporead can function
+correctly.
+"""
+
+import logging
+import multiprocessing
+import os
+import pyinotify
+import sys
+import threading
+import time
+
+from django.core.management.base import BaseCommand, CommandError
+from django.db import connection
+
+from main.models import Arch, Repo
+from .reporead import read_repo
+
+logging.basicConfig(
+ level=logging.WARNING,
+ format='%(asctime)s -> %(levelname)s: %(message)s',
+ datefmt='%Y-%m-%d %H:%M:%S',
+ stream=sys.stderr)
+logger = logging.getLogger()
+
+class Command(BaseCommand):
+ help = "Watch database files and run an update when necessary."
+ args = "[path_template]"
+
+ def handle(self, path_template=None, **options):
+ v = int(options.get('verbosity', 0))
+ if v == 0:
+ logger.level = logging.ERROR
+ elif v == 1:
+ logger.level = logging.INFO
+ elif v == 2:
+ logger.level = logging.DEBUG
+
+ if not path_template:
+ path_template = '/srv/ftp/%(repo)s/os/%(arch)s/'
+ self.path_template = path_template
+
+ notifier = self.setup_notifier()
+ logger.info('Entering notifier loop')
+ notifier.loop()
+
+ logger.info('Cancelling remaining threads...')
+ for thread in threading.enumerate():
+ if hasattr(thread, 'cancel'):
+ thread.cancel()
+
+ def setup_notifier(self):
+ '''Set up and configure the inotify machinery and logic.
+ This takes the provided or default path_template and builds a list of
+ directories we need to watch for database updates. It then validates
+ and passes these on to the various pyinotify pieces as necessary and
+ finally builds and returns a notifier object.'''
+ arches = Arch.objects.filter(agnostic=False)
+ repos = Repo.objects.all()
+ arch_path_map = dict((arch, None) for arch in arches)
+ all_paths = set()
+ total_paths = 0
+ for arch in arches:
+ combos = ({ 'repo': repo.name.lower(), 'arch': arch.name }
+ for repo in repos)
+ # take a python format string and generate all unique combinations
+ # of directories from it; using set() ensures we filter it down
+ paths = set(self.path_template % values for values in combos)
+ total_paths += len(paths)
+ all_paths |= paths
+ arch_path_map[arch] = paths
+
+ logger.info('Watching %d total paths', total_paths)
+ logger.debug(all_paths)
+
+ # sanity check- basically ensure every path we created from the
+ # template mapped to only one architecture
+ if total_paths != len(all_paths):
+ raise CommandError('path template did not uniquely '
+ 'determine architecture for each file')
+
+ # this thread is done using the database; all future access is done in
+ # the spawned read_repo() processes, so close the otherwise completely
+ # idle connection.
+ connection.close()
+
+ # A proper atomic replacement of the database as done by rsync is type
+ # IN_MOVED_TO. repo-add/remove will finish with a IN_CLOSE_WRITE.
+ mask = pyinotify.IN_CLOSE_WRITE | pyinotify.IN_MOVED_TO
+
+ manager = pyinotify.WatchManager()
+ for name in all_paths:
+ manager.add_watch(name, mask)
+
+ handler = EventHandler(arch_paths=arch_path_map)
+ return pyinotify.Notifier(manager, handler)
+
+
+class Database(object):
+ '''A object representing a pacman database on the filesystem. It stores
+ various bits of metadata and state representing the file path, when we last
+ updated, how long our delay is before performing the update, whether we are
+ updating now, etc.'''
+ def __init__(self, arch, path, delay=60.0, nice=3):
+ self.arch = arch
+ self.path = path
+ self.delay = delay
+ self.nice = nice
+ self.mtime = None
+ self.last_import = None
+ self.update_thread = None
+ self.updating = False
+ self.run_again = False
+ self.lock = threading.Lock()
+
+ def _start_update_countdown(self):
+ self.update_thread = threading.Timer(self.delay, self.update)
+ logger.info('Starting %.1f second countdown to update %s',
+ self.delay, self.path)
+ self.update_thread.start()
+
+ def queue_for_update(self, mtime):
+ logger.debug('Queueing database %s...', self.path)
+ with self.lock:
+ self.mtime = mtime
+ if self.updating:
+ # store the fact that we will need to run it again
+ self.run_again = True
+ return
+ if self.update_thread:
+ self.update_thread.cancel()
+ self.update_thread = None
+ self._start_update_countdown()
+
+ def update(self):
+ logger.debug('Updating database %s...', self.path)
+ with self.lock:
+ self.last_import = time.time()
+ self.updating = True
+
+ try:
+ # invoke reporead's primary method. we do this in a separate
+ # process for memory conservation purposes; these processes grow
+ # rather large so it is best to free up the memory ASAP.
+ def run():
+ if self.nice != 0:
+ os.nice(self.nice)
+ read_repo(self.arch, self.path, {})
+
+ process = multiprocessing.Process(target=run)
+ process.start()
+ process.join()
+ finally:
+ logger.debug('Done updating database %s.', self.path)
+ with self.lock:
+ self.update_thread = None
+ self.updating = False
+ if self.run_again:
+ self.run_again = False
+ self._start_update_countdown()
+
+
+class EventHandler(pyinotify.ProcessEvent):
+ '''Our main event handler which listens for database change events. Because
+ we are watching the whole directory, we filter down and only look at those
+ events dealing with files databases.'''
+
+ def my_init(self, **kwargs):
+ self.databases = {}
+ self.arch_lookup = {}
+
+ # we really want a single path to arch mapping, so massage the data
+ arch_paths = kwargs['arch_paths']
+ for arch, paths in arch_paths.items():
+ self.arch_lookup.update((path.rstrip('/'), arch) for path in paths)
+
+ def process_default(self, event):
+ '''Primary event processing function which kicks off reporead timer
+ threads if a files database was updated.'''
+ if not event.name:
+ return
+ # screen to only the files we care about
+ if event.name.endswith('.files.tar.gz'):
+ path = event.pathname
+ stat = os.stat(path)
+ database = self.databases.get(path, None)
+ if database is None:
+ arch = self.arch_lookup.get(event.path, None)
+ if arch is None:
+ logger.warning(
+ 'Could not determine arch for %s, skipping update',
+ path)
+ return
+ database = Database(arch, path)
+ self.databases[path] = database
+ database.queue_for_update(stat.st_mtime)
+
+
+# vim: set ts=4 sw=4 et:
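A usage sketch for the watcher (the path template shown is the documented
default; adjust it to match your mirror layout):

    $ ./manage.py reporead_inotify '/srv/ftp/%(repo)s/os/%(arch)s/'

Each repo.files.tar.gz under those paths is then re-read automatically,
after the 60-second settle delay, whenever rsync or repo-add rewrites it.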
diff --git a/devel/migrations/0001_initial.py b/devel/migrations/0001_initial.py
new file mode 100644
index 00000000..c28fc20f
--- /dev/null
+++ b/devel/migrations/0001_initial.py
@@ -0,0 +1,18 @@
+# encoding: utf-8
+from south.db import db
+from south.v2 import SchemaMigration
+from django.db import models
+
+class Migration(SchemaMigration):
+
+ def forwards(self, orm):
+ pass
+
+
+ def backwards(self, orm):
+ pass
+
+
+ models = {}
+
+ complete_apps = ['devel']
diff --git a/devel/migrations/0002_auto__add_masterkey.py b/devel/migrations/0002_auto__add_masterkey.py
new file mode 100644
index 00000000..ac1f745a
--- /dev/null
+++ b/devel/migrations/0002_auto__add_masterkey.py
@@ -0,0 +1,76 @@
+# encoding: utf-8
+from south.db import db
+from south.v2 import SchemaMigration
+from django.db import models
+
+class Migration(SchemaMigration):
+
+ depends_on = (
+ ("main", "0051_auto__chg_field_userprofile_pgp_key"),
+ )
+
+ def forwards(self, orm):
+ # Adding model 'MasterKey'
+ db.create_table('devel_masterkey', (
+ ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
+ ('owner', self.gf('django.db.models.fields.related.ForeignKey')(related_name='masterkey_owner', to=orm['auth.User'])),
+ ('revoker', self.gf('django.db.models.fields.related.ForeignKey')(related_name='masterkey_revoker', to=orm['auth.User'])),
+ ('pgp_key', self.gf('main.fields.PGPKeyField')(max_length=40)),
+ ('created', self.gf('django.db.models.fields.DateTimeField')()),
+ ('revoked', self.gf('django.db.models.fields.DateTimeField')(null=True, blank=True)),
+ ))
+ db.send_create_signal('devel', ['MasterKey'])
+
+ def backwards(self, orm):
+ db.delete_table('devel_masterkey')
+
+
+ models = {
+ 'auth.group': {
+ 'Meta': {'object_name': 'Group'},
+ 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
+ 'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
+ },
+ 'auth.permission': {
+ 'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
+ 'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
+ 'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
+ 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
+ },
+ 'auth.user': {
+ 'Meta': {'object_name': 'User'},
+ 'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
+ 'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
+ 'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
+ 'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
+ 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
+ 'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
+ 'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
+ 'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
+ 'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
+ 'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
+ 'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
+ 'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
+ },
+ 'contenttypes.contenttype': {
+ 'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
+ 'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
+ 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
+ 'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
+ },
+ 'devel.masterkey': {
+ 'Meta': {'object_name': 'MasterKey'},
+ 'created': ('django.db.models.fields.DateTimeField', [], {}),
+ 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'owner': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'masterkey_owner'", 'to': "orm['auth.User']"}),
+ 'pgp_key': ('main.fields.PGPKeyField', [], {'max_length': '40'}),
+ 'revoked': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
+ 'revoker': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'masterkey_revoker'", 'to': "orm['auth.User']"})
+ }
+ }
+
+ complete_apps = ['devel']
diff --git a/devel/migrations/__init__.py b/devel/migrations/__init__.py
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/devel/migrations/__init__.py
diff --git a/devel/models.py b/devel/models.py
index e69de29b..f31b8fbb 100644
--- a/devel/models.py
+++ b/devel/models.py
@@ -0,0 +1,20 @@
+from django.db import models
+from django.contrib.auth.models import User
+
+from main.fields import PGPKeyField
+
+
+class MasterKey(models.Model):
+ owner = models.ForeignKey(User, related_name='masterkey_owner',
+ help_text="The developer holding this master key")
+ revoker = models.ForeignKey(User, related_name='masterkey_revoker',
+ help_text="The developer holding the revocation certificate")
+ pgp_key = PGPKeyField(max_length=40, verbose_name="PGP key fingerprint",
+ help_text="consists of 40 hex digits; use `gpg --fingerprint`")
+ created = models.DateTimeField()
+ revoked = models.DateTimeField(null=True, blank=True)
+
+ class Meta:
+ ordering = ('created',)
+
+# vim: set ts=4 sw=4 et:
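A short usage sketch for the new model; the usernames and fingerprint below are hypothetical placeholders:

    from datetime import datetime
    from django.contrib.auth.models import User
    from devel.models import MasterKey

    owner = User.objects.get(username='alice')     # hypothetical developer
    revoker = User.objects.get(username='bob')     # holds the revocation cert
    MasterKey.objects.create(
        owner=owner,
        revoker=revoker,
        pgp_key='0' * 40,    # placeholder for a 40-hex-digit fingerprint
        created=datetime.utcnow(),
    )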
diff --git a/devel/utils.py b/devel/utils.py
index d7a154a8..62b12cd5 100644
--- a/devel/utils.py
+++ b/devel/utils.py
@@ -47,6 +47,7 @@ SELECT pr.user_id, COUNT(*), COUNT(p.flag_date)
class UserFinder(object):
def __init__(self):
self.cache = {}
+ self.username_cache = {}
@staticmethod
def user_email(name, email):
@@ -111,7 +112,22 @@ class UserFinder(object):
self.cache[userstring] = user
return user
+ def find_by_username(self, username):
+ if not username:
+ return None
+ if username in self.username_cache:
+ return self.username_cache[username]
+
+ try:
+ user = User.objects.get(username=username)
+ except User.DoesNotExist:
+ user = None
+
+ self.username_cache[username] = user
+ return user
+
def clear_cache(self):
self.cache = {}
+ self.username_cache = {}
# vim: set ts=4 sw=4 et:
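The new lookup memoizes hits and misses alike, so repeated queries for the same username cost at most one database round trip. A sketch of the expected behavior (the username is hypothetical):

    finder = UserFinder()
    user = finder.find_by_username('alice')   # one User.objects.get() call
    again = finder.find_by_username('alice')  # answered from username_cache
    assert user is again
    finder.clear_cache()                      # empties both caches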
diff --git a/devel/views.py b/devel/views.py
index 27c32e7b..b9bd7cce 100644
--- a/devel/views.py
+++ b/devel/views.py
@@ -18,6 +18,7 @@ from main.models import Package, PackageDepend, PackageFile, TodolistPkg
from main.models import Arch, Repo
from main.models import UserProfile
from packages.models import PackageRelation
+from packages.utils import get_signoff_groups
from todolists.utils import get_annotated_todolists
from .utils import get_annotated_maintainers
@@ -31,7 +32,12 @@ from string import ascii_letters, digits
@never_cache
def index(request):
'''the developer dashboard'''
- inner_q = PackageRelation.objects.filter(user=request.user).values('pkgbase')
+ if request.user.is_authenticated():
+ inner_q = PackageRelation.objects.filter(user=request.user)
+ else:
+ inner_q = PackageRelation.objects.none()
+ inner_q = inner_q.values('pkgbase')
+
flagged = Package.objects.normal().filter(
flag_date__isnull=False, pkgbase__in=inner_q).order_by('pkgname')
@@ -43,6 +49,9 @@ def index(request):
todolists = get_annotated_todolists()
todolists = [todolist for todolist in todolists if todolist.incomplete_count > 0]
+ signoffs = sorted(get_signoff_groups(user=request.user),
+ key=operator.attrgetter('pkgbase'))
+
maintainers = get_annotated_maintainers()
maintained = PackageRelation.objects.filter(
@@ -65,6 +74,7 @@ def index(request):
'orphan': orphan,
'flagged' : flagged,
'todopkgs' : todopkgs,
+ 'signoffs': signoffs,
}
return direct_to_template(request, 'devel/index.html', page_dict)
@@ -73,11 +83,11 @@ def index(request):
@never_cache
def clock(request):
devs = User.objects.filter(is_active=True).order_by(
- 'username').select_related('userprofile')
+ 'first_name', 'last_name').select_related('userprofile')
- # now annotate each dev object with their current time
now = datetime.now()
utc_now = datetime.utcnow().replace(tzinfo=pytz.utc)
+ # now annotate each dev object with their current time
for dev in devs:
tz = pytz.timezone(dev.userprofile.time_zone)
dev.current_time = utc_now.astimezone(tz)
@@ -142,12 +152,12 @@ def report(request, report, username=None):
if report == 'old':
title = 'Packages last built more than two years ago'
- cutoff = datetime.now() - timedelta(days=365 * 2)
+ cutoff = datetime.utcnow() - timedelta(days=365 * 2)
packages = packages.filter(
build_date__lt=cutoff).order_by('build_date')
elif report == 'long-out-of-date':
title = 'Packages marked out-of-date more than 90 days ago'
- cutoff = datetime.now() - timedelta(days=90)
+ cutoff = datetime.utcnow() - timedelta(days=90)
packages = packages.filter(
flag_date__lt=cutoff).order_by('flag_date')
elif report == 'big':
@@ -282,7 +292,7 @@ class NewUserForm(forms.ModelForm):
send_mail("Your new parabolaweb account",
template.render(ctx),
- 'Parabola <dev@list.parabolagnulinux.org>',
+ 'Parabola <dev@lists.parabolagnulinux.org>',
[user.email],
fail_silently=False)
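The dashboard change above guards against anonymous visitors: filtering PackageRelation on an unauthenticated user matches no real row, so an empty queryset stands in instead. The same pattern in isolation (a sketch, not code from this commit):

    from packages.models import PackageRelation

    def relations_for(user):
        # .none() keeps the value a queryset, so .values('pkgbase') and
        # later __in lookups behave uniformly on both branches.
        if user.is_authenticated():
            qs = PackageRelation.objects.filter(user=user)
        else:
            qs = PackageRelation.objects.none()
        return qs.values('pkgbase')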
diff --git a/lib/python2.7/UserDict.py b/lib/python2.7/UserDict.py
deleted file mode 120000
index 1dcde33c..00000000
--- a/lib/python2.7/UserDict.py
+++ /dev/null
@@ -1 +0,0 @@
-/usr/lib/python2.7/UserDict.py \ No newline at end of file
diff --git a/lib/python2.7/_abcoll.py b/lib/python2.7/_abcoll.py
deleted file mode 120000
index e39c38d2..00000000
--- a/lib/python2.7/_abcoll.py
+++ /dev/null
@@ -1 +0,0 @@
-/usr/lib/python2.7/_abcoll.py \ No newline at end of file
diff --git a/lib/python2.7/_weakrefset.py b/lib/python2.7/_weakrefset.py
deleted file mode 120000
index a3c1cd4f..00000000
--- a/lib/python2.7/_weakrefset.py
+++ /dev/null
@@ -1 +0,0 @@
-/usr/lib/python2.7/_weakrefset.py \ No newline at end of file
diff --git a/lib/python2.7/abc.py b/lib/python2.7/abc.py
deleted file mode 120000
index cb3e5d16..00000000
--- a/lib/python2.7/abc.py
+++ /dev/null
@@ -1 +0,0 @@
-/usr/lib/python2.7/abc.py \ No newline at end of file
diff --git a/lib/python2.7/codecs.py b/lib/python2.7/codecs.py
deleted file mode 120000
index 50169dc7..00000000
--- a/lib/python2.7/codecs.py
+++ /dev/null
@@ -1 +0,0 @@
-/usr/lib/python2.7/codecs.py \ No newline at end of file
diff --git a/lib/python2.7/config b/lib/python2.7/config
deleted file mode 120000
index 154af7a5..00000000
--- a/lib/python2.7/config
+++ /dev/null
@@ -1 +0,0 @@
-/usr/lib/python2.7/config \ No newline at end of file
diff --git a/lib/python2.7/copy_reg.py b/lib/python2.7/copy_reg.py
deleted file mode 120000
index 5dc0af34..00000000
--- a/lib/python2.7/copy_reg.py
+++ /dev/null
@@ -1 +0,0 @@
-/usr/lib/python2.7/copy_reg.py \ No newline at end of file
diff --git a/lib/python2.7/distutils/__init__.py b/lib/python2.7/distutils/__init__.py
deleted file mode 100644
index 7ebb41c0..00000000
--- a/lib/python2.7/distutils/__init__.py
+++ /dev/null
@@ -1,91 +0,0 @@
-import os
-import sys
-import warnings
-import opcode # opcode is not a virtualenv module, so we can use it to find the stdlib
- # Important! To work on pypy, this must be a module that resides in the
- # lib-python/modified-x.y.z directory
-
-dirname = os.path.dirname
-
-distutils_path = os.path.join(os.path.dirname(opcode.__file__), 'distutils')
-if os.path.normpath(distutils_path) == os.path.dirname(os.path.normpath(__file__)):
- warnings.warn(
- "The virtualenv distutils package at %s appears to be in the same location as the system distutils?")
-else:
- __path__.insert(0, distutils_path)
- exec open(os.path.join(distutils_path, '__init__.py')).read()
-
-import dist
-import sysconfig
-
-
-## patch build_ext (distutils doesn't know how to get the libs directory
-## path on windows - it hardcodes the paths around the patched sys.prefix)
-
-if sys.platform == 'win32':
- from distutils.command.build_ext import build_ext as old_build_ext
- class build_ext(old_build_ext):
- def finalize_options (self):
- if self.library_dirs is None:
- self.library_dirs = []
- elif isinstance(self.library_dirs, basestring):
- self.library_dirs = self.library_dirs.split(os.pathsep)
-
- self.library_dirs.insert(0, os.path.join(sys.real_prefix, "Libs"))
- old_build_ext.finalize_options(self)
-
- from distutils.command import build_ext as build_ext_module
- build_ext_module.build_ext = build_ext
-
-## distutils.dist patches:
-
-old_find_config_files = dist.Distribution.find_config_files
-def find_config_files(self):
- found = old_find_config_files(self)
- system_distutils = os.path.join(distutils_path, 'distutils.cfg')
- #if os.path.exists(system_distutils):
- # found.insert(0, system_distutils)
- # What to call the per-user config file
- if os.name == 'posix':
- user_filename = ".pydistutils.cfg"
- else:
- user_filename = "pydistutils.cfg"
- user_filename = os.path.join(sys.prefix, user_filename)
- if os.path.isfile(user_filename):
- for item in list(found):
- if item.endswith('pydistutils.cfg'):
- found.remove(item)
- found.append(user_filename)
- return found
-dist.Distribution.find_config_files = find_config_files
-
-## distutils.sysconfig patches:
-
-old_get_python_inc = sysconfig.get_python_inc
-def sysconfig_get_python_inc(plat_specific=0, prefix=None):
- if prefix is None:
- prefix = sys.real_prefix
- return old_get_python_inc(plat_specific, prefix)
-sysconfig_get_python_inc.__doc__ = old_get_python_inc.__doc__
-sysconfig.get_python_inc = sysconfig_get_python_inc
-
-old_get_python_lib = sysconfig.get_python_lib
-def sysconfig_get_python_lib(plat_specific=0, standard_lib=0, prefix=None):
- if standard_lib and prefix is None:
- prefix = sys.real_prefix
- return old_get_python_lib(plat_specific, standard_lib, prefix)
-sysconfig_get_python_lib.__doc__ = old_get_python_lib.__doc__
-sysconfig.get_python_lib = sysconfig_get_python_lib
-
-old_get_config_vars = sysconfig.get_config_vars
-def sysconfig_get_config_vars(*args):
- real_vars = old_get_config_vars(*args)
- if sys.platform == 'win32':
- lib_dir = os.path.join(sys.real_prefix, "libs")
- if isinstance(real_vars, dict) and 'LIBDIR' not in real_vars:
- real_vars['LIBDIR'] = lib_dir # asked for all
- elif isinstance(real_vars, list) and 'LIBDIR' in args:
- real_vars = real_vars + [lib_dir] # asked for list
- return real_vars
-sysconfig_get_config_vars.__doc__ = old_get_config_vars.__doc__
-sysconfig.get_config_vars = sysconfig_get_config_vars
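The deleted virtualenv shim above repeats one idiom: save the original function, wrap it so prefix defaults point at sys.real_prefix, copy the docstring across, and patch the wrapper back onto the module. The idiom in miniature, shown on a neutral target rather than distutils:

    import math

    old_sqrt = math.sqrt
    def patched_sqrt(x):
        # Adjust the argument, then delegate; the shim likewise swapped
        # in a different prefix before calling the original function.
        return old_sqrt(abs(x))
    patched_sqrt.__doc__ = old_sqrt.__doc__   # keep help() output intact
    math.sqrt = patched_sqrt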
diff --git a/lib/python2.7/distutils/distutils.cfg b/lib/python2.7/distutils/distutils.cfg
deleted file mode 100644
index 1af230ec..00000000
--- a/lib/python2.7/distutils/distutils.cfg
+++ /dev/null
@@ -1,6 +0,0 @@
-# This is a config file local to this virtualenv installation
-# You may include options that will be used by all distutils commands,
-# and by easy_install. For instance:
-#
-# [easy_install]
-# find_links = http://mylocalsite
diff --git a/lib/python2.7/encodings b/lib/python2.7/encodings
deleted file mode 120000
index 1250ad86..00000000
--- a/lib/python2.7/encodings
+++ /dev/null
@@ -1 +0,0 @@
-/usr/lib/python2.7/encodings \ No newline at end of file
diff --git a/lib/python2.7/fnmatch.py b/lib/python2.7/fnmatch.py
deleted file mode 120000
index ec3e10cf..00000000
--- a/lib/python2.7/fnmatch.py
+++ /dev/null
@@ -1 +0,0 @@
-/usr/lib/python2.7/fnmatch.py \ No newline at end of file
diff --git a/lib/python2.7/genericpath.py b/lib/python2.7/genericpath.py
deleted file mode 120000
index cb8897ce..00000000
--- a/lib/python2.7/genericpath.py
+++ /dev/null
@@ -1 +0,0 @@
-/usr/lib/python2.7/genericpath.py \ No newline at end of file
diff --git a/lib/python2.7/lib-dynload b/lib/python2.7/lib-dynload
deleted file mode 120000
index c706a1eb..00000000
--- a/lib/python2.7/lib-dynload
+++ /dev/null
@@ -1 +0,0 @@
-/usr/lib/python2.7/lib-dynload \ No newline at end of file
diff --git a/lib/python2.7/linecache.py b/lib/python2.7/linecache.py
deleted file mode 120000
index 943c4297..00000000
--- a/lib/python2.7/linecache.py
+++ /dev/null
@@ -1 +0,0 @@
-/usr/lib/python2.7/linecache.py \ No newline at end of file
diff --git a/lib/python2.7/locale.py b/lib/python2.7/locale.py
deleted file mode 120000
index 92c243c6..00000000
--- a/lib/python2.7/locale.py
+++ /dev/null
@@ -1 +0,0 @@
-/usr/lib/python2.7/locale.py \ No newline at end of file
diff --git a/lib/python2.7/ntpath.py b/lib/python2.7/ntpath.py
deleted file mode 120000
index 5659ae14..00000000
--- a/lib/python2.7/ntpath.py
+++ /dev/null
@@ -1 +0,0 @@
-/usr/lib/python2.7/ntpath.py \ No newline at end of file
diff --git a/lib/python2.7/orig-prefix.txt b/lib/python2.7/orig-prefix.txt
deleted file mode 100644
index e25db584..00000000
--- a/lib/python2.7/orig-prefix.txt
+++ /dev/null
@@ -1 +0,0 @@
-/usr \ No newline at end of file
diff --git a/lib/python2.7/os.py b/lib/python2.7/os.py
deleted file mode 120000
index 950fc8d2..00000000
--- a/lib/python2.7/os.py
+++ /dev/null
@@ -1 +0,0 @@
-/usr/lib/python2.7/os.py \ No newline at end of file
diff --git a/lib/python2.7/posixpath.py b/lib/python2.7/posixpath.py
deleted file mode 120000
index 30cb8ca5..00000000
--- a/lib/python2.7/posixpath.py
+++ /dev/null
@@ -1 +0,0 @@
-/usr/lib/python2.7/posixpath.py \ No newline at end of file
diff --git a/lib/python2.7/re.py b/lib/python2.7/re.py
deleted file mode 120000
index 56a07316..00000000
--- a/lib/python2.7/re.py
+++ /dev/null
@@ -1 +0,0 @@
-/usr/lib/python2.7/re.py \ No newline at end of file
diff --git a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/EGG-INFO/PKG-INFO b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/EGG-INFO/PKG-INFO
deleted file mode 100644
index 629e916b..00000000
--- a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/EGG-INFO/PKG-INFO
+++ /dev/null
@@ -1,531 +0,0 @@
-Metadata-Version: 1.0
-Name: distribute
-Version: 0.6.14
-Summary: Easily download, build, install, upgrade, and uninstall Python packages
-Home-page: http://packages.python.org/distribute
-Author: The fellowship of the packaging
-Author-email: distutils-sig@python.org
-License: PSF or ZPL
-Description: ===============================
- Installing and Using Distribute
- ===============================
-
- .. contents:: **Table of Contents**
-
- -----------
- Disclaimers
- -----------
-
- About the fork
- ==============
-
- `Distribute` is a fork of the `Setuptools` project.
-
- Distribute is intended to replace Setuptools as the standard method
- for working with Python module distributions.
-
- The fork has two goals:
-
- - Providing a backward compatible version to replace Setuptools
- and make all distributions that depend on Setuptools work as
- before, but with fewer bugs and behavioral issues.
-
- This work is done in the 0.6.x series.
-
- Starting with version 0.6.2, Distribute supports Python 3.
- Installing and using distribute for Python 3 code works exactly
- the same as for Python 2 code, but Distribute also helps you to support
- Python 2 and Python 3 from the same source code by letting you run 2to3
- on the code as a part of the build process, by setting the keyword parameter
- ``use_2to3`` to True. See http://packages.python.org/distribute for more
- information.
-
- - Refactoring the code, and releasing it in several distributions.
- This work is being done in the 0.7.x series but not yet released.
-
- The roadmap is still evolving, and the page that is up-to-date is
- located at : `http://packages.python.org/distribute/roadmap`.
-
- If you install `Distribute` and want to switch back for any reason to
- `Setuptools`, get to the `Uninstallation instructions`_ section.
-
- More documentation
- ==================
-
- You can get more information in the Sphinx-based documentation, located
- at http://packages.python.org/distribute. This documentation includes the old
- Setuptools documentation that is slowly being replaced, and brand new content.
-
- About the installation process
- ==============================
-
- The `Distribute` installer modifies your installation by de-activating an
- existing installation of `Setuptools` in a bootstrap process. This process
- has been tested in various installation schemes and contexts but in case of a
- bug during this process your Python installation might be left in a broken
- state. Since all modified files and directories are copied before the
- installation starts, you will be able to get back to a normal state by reading
- the instructions in the `Uninstallation instructions`_ section.
-
- In any case, it is recommended to save your `site-packages` directory before
- you start the installation of `Distribute`.
-
- -------------------------
- Installation Instructions
- -------------------------
-
- Distribute is only released as a source distribution.
-
- It can be installed using pip, either from the source tarball,
- or by using the ``distribute_setup.py`` script provided online.
-
- ``distribute_setup.py`` is the simplest and preferred way on all systems.
-
- distribute_setup.py
- ===================
-
- Download
- `distribute_setup.py <http://python-distribute.org/distribute_setup.py>`_
- and execute it, using the Python interpreter of your choice.
-
- If your shell has the ``curl`` program you can do::
-
- $ curl -O http://python-distribute.org/distribute_setup.py
- $ python distribute_setup.py
-
- Notice this file is also provided in the source release.
-
- pip
- ===
-
- Run easy_install or pip::
-
- $ pip install distribute
-
- Source installation
- ===================
-
- Download the source tarball, uncompress it, then run the install command::
-
- $ curl -O http://pypi.python.org/packages/source/d/distribute/distribute-0.6.14.tar.gz
- $ tar -xzvf distribute-0.6.14.tar.gz
- $ cd distribute-0.6.14
- $ python setup.py install
-
- ---------------------------
- Uninstallation Instructions
- ---------------------------
-
- Like other distutils-based distributions, Distribute doesn't provide an
- uninstaller yet. It's all done manually! We are all waiting for PEP 376
- support in Python.
-
- Distribute is installed in three steps:
-
- 1. it moves an existing installation of Setuptools out of the way
- 2. it installs a `fake` setuptools installation
- 3. it installs distribute
-
- Distribute can be removed like this:
-
- - remove the ``distribute*.egg`` file located in your site-packages directory
- remove the ``setuptools.pth`` file located in your site-packages directory
- remove the easy_install script located in your ``sys.prefix/bin`` directory
- - remove the ``setuptools*.egg`` directory located in your site-packages directory,
- if any.
-
- If you want to get back to setuptools:
-
- reinstall setuptools using its instructions.
-
- Lastly:
-
- - remove the *.OLD.* directory located in your site-packages directory if any,
- **once you have checked everything was working correctly again**.
-
- -------------------------
- Quick help for developers
- -------------------------
-
- To create an egg which is compatible with Distribute, use the same
- practice as with Setuptools, e.g.::
-
- from setuptools import setup
-
- setup(...
- )
-
- To use `pkg_resources` to access data files in the egg, you should
- require the Setuptools distribution explicitly::
-
- from setuptools import setup
-
- setup(...
- install_requires=['setuptools']
- )
-
- Only if you need Distribute-specific functionality should you depend
- on it explicitly. In this case, replace the Setuptools dependency::
-
- from setuptools import setup
-
- setup(...
- install_requires=['distribute']
- )
-
- -----------
- Install FAQ
- -----------
-
- - **Why is Distribute wrapping my Setuptools installation?**
-
- Since Distribute is a fork, and since it provides the same package
- and modules, it renames the existing Setuptools egg and inserts a
- new one which merely wraps the Distribute code. This way, full
- backwards compatibility is kept for packages which rely on the
- Setuptools modules.
-
- At the same time, packages can meet their dependency on Setuptools
- without actually installing it (which would disable Distribute).
-
- - **How does Distribute interact with virtualenv?**
-
- Every time you create a virtualenv, it will install setuptools by default.
- You either need to re-install Distribute in it right after or pass the
- ``--distribute`` option when creating it.
-
- Once installed, your virtualenv will use Distribute transparently.
-
- However, if you have Setuptools installed in your system-wide Python,
- and if the virtualenv you are in was generated without the `--no-site-packages`
- option, the Distribute installation will stop.
-
- In this case, you need to build a virtualenv with the `--no-site-packages`
- option or to install `Distribute` globally.
-
- - **How does Distribute interact with zc.buildout?**
-
- You can use Distribute in your zc.buildout, with the --distribute option,
- starting at zc.buildout 1.4.2::
-
- $ python bootstrap.py --distribute
-
- For previous zc.buildout versions, *the only thing* you need to do
- is use the bootstrap at `http://python-distribute.org/bootstrap.py`. Run
- that bootstrap and ``bin/buildout`` (and all other buildout-generated
- scripts) will transparently use distribute instead of setuptools. You do
- not need a specific buildout release.
-
- A shared eggs directory is no problem (since 0.6.6): the setuptools egg is
- left in place unmodified. So other buildouts that do not yet use the new
- bootstrap continue to work just fine. And there is no need to list
- ``distribute`` somewhere in your eggs: using the bootstrap is enough.
-
- The source code for the bootstrap script is located at
- `http://bitbucket.org/tarek/buildout-distribute`.
-
-
-
- -----------------------------
- Feedback and getting involved
- -----------------------------
-
- - Mailing list: http://mail.python.org/mailman/listinfo/distutils-sig
- - Issue tracker: http://bitbucket.org/tarek/distribute/issues/
- - Code Repository: http://bitbucket.org/tarek/distribute
-
- =======
- CHANGES
- =======
-
- ------
- 0.6.14
- ------
-
- * Issue 170: Fixed unittest failure. Thanks to Toshio.
- * Issue 171: Fixed race condition in unittests causing deadlocks in the test suite.
- * Issue 143: Fixed a lookup issue with easy_install.
- Thanks to David and Zooko.
- * Issue 174: Fixed the edit mode when it's used with setuptools itself
-
- ------
- 0.6.13
- ------
-
- * Issue 160: 2.7 gives ValueError("Invalid IPv6 URL")
- * Issue 150: Fixed using ~/.local even in a --no-site-packages virtualenv
- * Issue 163: scan index links before external links, and don't use the md5 when
- comparing two distributions
-
- ------
- 0.6.12
- ------
-
- * Issue 149: Fixed various failures on 2.3/2.4
-
- ------
- 0.6.11
- ------
-
- * Found another case of SandboxViolation - fixed
- * Issue 15 and 48: Introduced a socket timeout of 15 seconds on url openings
- * Added indexsidebar.html into MANIFEST.in
- * Issue 108: Fixed TypeError with Python3.1
- * Issue 121: Fixed --help install command trying to actually install.
- * Issue 112: Added an os.makedirs so that Tarek's solution will work.
- * Issue 133: Added --no-find-links to easy_install
- * Added easy_install --user
- * Issue 100: Fixed develop --user not taking '.' in PYTHONPATH into account
- * Issue 134: removed spurious UserWarnings. Patch by VanLindberg
- * Issue 138: cant_write_to_target error when setup_requires is used.
- * Issue 147: respect the sys.dont_write_bytecode flag
-
- ------
- 0.6.10
- ------
-
- * Reverted change made for the DistributionNotFound exception because
- zc.buildout uses the exception message to get the name of the
- distribution.
-
- -----
- 0.6.9
- -----
-
- * Issue 90: unknown setuptools version can be added in the working set
- * Issue 87: setup.py doesn't try to convert distribute_setup.py anymore
- Initial Patch by arfrever.
- * Issue 89: added a side bar with a download link to the doc.
- * Issue 86: fixed missing sentence in pkg_resources doc.
- * Added a nicer error message when a DistributionNotFound is raised.
- * Issue 80: test_develop now works with Python 3.1
- * Issue 93: upload_docs now works if there is an empty sub-directory.
- * Issue 70: exec bit on non-exec files
- * Issue 99: now the standalone easy_install command doesn't use a
- "setup.cfg" if any exists in the working directory. It will use it
- only if triggered by ``install_requires`` from a setup.py call
- (install, develop, etc).
- * Issue 101: Allowing ``os.devnull`` in Sandbox
- * Issue 92: Fixed the "no eggs" found error with MacPort
- (platform.mac_ver() fails)
- * Issue 103: test_get_script_header_jython_workaround not run
- anymore under py3 with C or POSIX locale. Contributed by Arfrever.
- * Issue 104: removed the assertion when the installation fails,
- with a nicer message for the end user.
- * Issue 100: making sure there's no SandboxViolation when
- the setup script patches setuptools.
-
- -----
- 0.6.8
- -----
-
- * Added "check_packages" in dist. (added in Setuptools 0.6c11)
- * Fixed the DONT_PATCH_SETUPTOOLS state.
-
- -----
- 0.6.7
- -----
-
- * Issue 58: Added --user support to the develop command
- * Issue 11: Generated scripts now wrap their call to the script entry point
- in the standard "if name == 'main'"
- * Added the 'DONT_PATCH_SETUPTOOLS' environment variable, so virtualenv
- can drive an installation that doesn't patch a global setuptools.
- * Reviewed unladen-swallow specific change from
- http://code.google.com/p/unladen-swallow/source/detail?spec=svn875&r=719
- and determined that it no longer applies. Distribute should work fine with
- Unladen Swallow 2009Q3.
- * Issue 21: Allow PackageIndex.open_url to gracefully handle all cases of a
- httplib.HTTPException instead of just InvalidURL and BadStatusLine.
- * Removed virtual-python.py from this distribution and updated documentation
- to point to the actively maintained virtualenv instead.
- * Issue 64: use_setuptools no longer rebuilds the distribute egg every
- time it is run
- * use_setuptools now properly respects the requested version
- * use_setuptools will no longer try to import a distribute egg for the
- wrong Python version
- * Issue 74: no_fake should be True by default.
- * Issue 72: avoid a bootstrapping issue with easy_install -U
-
- -----
- 0.6.6
- -----
-
- * Unified the bootstrap file so it works on both py2.x and py3k without 2to3
- (patch by Holger Krekel)
-
- -----
- 0.6.5
- -----
-
- * Issue 65: cli.exe and gui.exe are now generated at build time,
- depending on the platform in use.
-
- * Issue 67: Fixed doc typo (PEP 381/382)
-
- * Distribute no longer shadows setuptools if we require a 0.7-series
- setuptools. And an error is raised when installing a 0.7 setuptools with
- distribute.
-
- * When run from within buildout, no attempt is made to modify an existing
- setuptools egg, whether in a shared egg directory or a system setuptools.
-
- * Fixed a hole in sandboxing allowing builtin file to write outside of
- the sandbox.
-
- -----
- 0.6.4
- -----
-
- * Added the generation of `distribute_setup_3k.py` during the release.
- This closes http://bitbucket.org/tarek/distribute/issue/52.
-
- * Added an upload_docs command to easily upload project documentation to
- PyPI's http://packages.python.org.
- This closes http://bitbucket.org/tarek/distribute/issue/56.
-
- * Fixed a bootstrap bug on the use_setuptools() API.
-
- -----
- 0.6.3
- -----
-
- setuptools
- ==========
-
- * Fixed a bunch of calls to file() that caused crashes on Python 3.
-
- bootstrapping
- =============
-
- * Fixed a bug in sorting that caused bootstrap to fail on Python 3.
-
- -----
- 0.6.2
- -----
-
- setuptools
- ==========
-
- * Added Python 3 support; see docs/python3.txt.
- This closes http://bugs.python.org/setuptools/issue39.
-
- * Added option to run 2to3 automatically when installing on Python 3.
- This closes http://bitbucket.org/tarek/distribute/issue/31.
-
- * Fixed invalid usage of requirement.parse, that broke develop -d.
- This closes http://bugs.python.org/setuptools/issue44.
-
- * Fixed script launcher for 64-bit Windows.
- This closes http://bugs.python.org/setuptools/issue2.
-
- * KeyError when compiling extensions.
- This closes http://bugs.python.org/setuptools/issue41.
-
- bootstrapping
- =============
-
- * Fixed bootstrap not working on Windows.
- This closes http://bitbucket.org/tarek/distribute/issue/49.
-
- * Fixed 2.6 dependencies.
- This closes http://bitbucket.org/tarek/distribute/issue/50.
-
- * Make sure setuptools is patched when running through easy_install
- This closes http://bugs.python.org/setuptools/issue40.
-
- -----
- 0.6.1
- -----
-
- setuptools
- ==========
-
- * package_index.urlopen now catches BadStatusLine and malformed url errors.
- This closes http://bitbucket.org/tarek/distribute/issue/16 and
- http://bitbucket.org/tarek/distribute/issue/18.
-
- * zip_ok is now False by default. This closes
- http://bugs.python.org/setuptools/issue33.
-
- * Fixed invalid URL error catching. http://bugs.python.org/setuptools/issue20.
-
- * Fixed invalid bootstrapping with easy_install installation
- http://bitbucket.org/tarek/distribute/issue/40.
- Thanks to Florian Schulze for the help.
-
- * Removed buildout/bootstrap.py. A new repository will create a specific
- bootstrap.py script.
-
-
- bootstrapping
- =============
-
- * The bootstrap process leaves setuptools alone if detected in the system
- and --root or --prefix is provided, but is not in the same location.
- This closes http://bitbucket.org/tarek/distribute/issue/10.
-
- ---
- 0.6
- ---
-
- setuptools
- ==========
-
- * Packages required at build time were not fully present at install time.
- This closes http://bitbucket.org/tarek/distribute/issue/12.
-
- * Protected against failures in tarfile extraction. This closes
- http://bitbucket.org/tarek/distribute/issue/10.
-
- * Made Jython api_tests.txt doctest compatible. This closes
- http://bitbucket.org/tarek/distribute/issue/7.
-
- * sandbox.py replaced builtin type file with builtin function open. This
- closes http://bitbucket.org/tarek/distribute/issue/6.
-
- * Immediately close all file handles. This closes
- http://bitbucket.org/tarek/distribute/issue/3.
-
- * Added compatibility with Subversion 1.6. This references
- http://bitbucket.org/tarek/distribute/issue/1.
-
- pkg_resources
- =============
-
- * Avoid a call to /usr/bin/sw_vers on OSX and use the official platform API
- instead. Based on a patch from ronaldoussoren. This closes
- http://bitbucket.org/tarek/distribute/issue/5.
-
- * Fixed a SandboxViolation for mkdir that could occur in certain cases.
- This closes http://bitbucket.org/tarek/distribute/issue/13.
-
- * Allow find_on_path on systems with tight permissions to fail gracefully.
- This closes http://bitbucket.org/tarek/distribute/issue/9.
-
- * Corrected inconsistency between documentation and code of add_entry.
- This closes http://bitbucket.org/tarek/distribute/issue/8.
-
- * Immediately close all file handles. This closes
- http://bitbucket.org/tarek/distribute/issue/3.
-
- easy_install
- ============
-
- * Immediately close all file handles. This closes
- http://bitbucket.org/tarek/distribute/issue/3.
-
-
-Keywords: CPAN PyPI distutils eggs package management
-Platform: UNKNOWN
-Classifier: Development Status :: 5 - Production/Stable
-Classifier: Intended Audience :: Developers
-Classifier: License :: OSI Approved :: Python Software Foundation License
-Classifier: License :: OSI Approved :: Zope Public License
-Classifier: Operating System :: OS Independent
-Classifier: Programming Language :: Python
-Classifier: Programming Language :: Python :: 3
-Classifier: Topic :: Software Development :: Libraries :: Python Modules
-Classifier: Topic :: System :: Archiving :: Packaging
-Classifier: Topic :: System :: Systems Administration
-Classifier: Topic :: Utilities
diff --git a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/EGG-INFO/SOURCES.txt b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/EGG-INFO/SOURCES.txt
deleted file mode 100644
index fc0a26bd..00000000
--- a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/EGG-INFO/SOURCES.txt
+++ /dev/null
@@ -1,84 +0,0 @@
-CHANGES.txt
-CONTRIBUTORS.txt
-DEVGUIDE.txt
-MANIFEST.in
-README.txt
-distribute_setup.py
-easy_install.py
-launcher.c
-pkg_resources.py
-setup.cfg
-setup.py
-site.py
-distribute.egg-info/PKG-INFO
-distribute.egg-info/SOURCES.txt
-distribute.egg-info/dependency_links.txt
-distribute.egg-info/entry_points.txt
-distribute.egg-info/top_level.txt
-distribute.egg-info/zip-safe
-docs/Makefile
-docs/conf.py
-docs/easy_install.txt
-docs/index.txt
-docs/pkg_resources.txt
-docs/python3.txt
-docs/roadmap.txt
-docs/setuptools.txt
-docs/using.txt
-docs/_templates/indexsidebar.html
-docs/_theme/nature/theme.conf
-docs/_theme/nature/static/nature.css_t
-docs/_theme/nature/static/pygments.css
-setuptools/__init__.py
-setuptools/archive_util.py
-setuptools/cli.exe
-setuptools/depends.py
-setuptools/dist.py
-setuptools/extension.py
-setuptools/gui.exe
-setuptools/package_index.py
-setuptools/sandbox.py
-setuptools/command/__init__.py
-setuptools/command/alias.py
-setuptools/command/bdist_egg.py
-setuptools/command/bdist_rpm.py
-setuptools/command/bdist_wininst.py
-setuptools/command/build_ext.py
-setuptools/command/build_py.py
-setuptools/command/develop.py
-setuptools/command/easy_install.py
-setuptools/command/egg_info.py
-setuptools/command/install.py
-setuptools/command/install_egg_info.py
-setuptools/command/install_lib.py
-setuptools/command/install_scripts.py
-setuptools/command/register.py
-setuptools/command/rotate.py
-setuptools/command/saveopts.py
-setuptools/command/sdist.py
-setuptools/command/setopt.py
-setuptools/command/test.py
-setuptools/command/upload.py
-setuptools/command/upload_docs.py
-setuptools/tests/__init__.py
-setuptools/tests/doctest.py
-setuptools/tests/server.py
-setuptools/tests/test_build_ext.py
-setuptools/tests/test_develop.py
-setuptools/tests/test_easy_install.py
-setuptools/tests/test_packageindex.py
-setuptools/tests/test_resources.py
-setuptools/tests/test_sandbox.py
-setuptools/tests/test_upload_docs.py
-setuptools/tests/win_script_wrapper.txt
-setuptools/tests/indexes/test_links_priority/external.html
-setuptools/tests/indexes/test_links_priority/simple/foobar/index.html
-tests/api_tests.txt
-tests/install_test.py
-tests/manual_test.py
-tests/test_distribute_setup.py
-tests/shlib_test/hello.c
-tests/shlib_test/hello.pyx
-tests/shlib_test/hellolib.c
-tests/shlib_test/setup.py
-tests/shlib_test/test_hello.py \ No newline at end of file
diff --git a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/EGG-INFO/dependency_links.txt b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/EGG-INFO/dependency_links.txt
deleted file mode 100644
index 8b137891..00000000
--- a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/EGG-INFO/dependency_links.txt
+++ /dev/null
@@ -1 +0,0 @@
-
diff --git a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/EGG-INFO/entry_points.txt b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/EGG-INFO/entry_points.txt
deleted file mode 100644
index 9fd41758..00000000
--- a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/EGG-INFO/entry_points.txt
+++ /dev/null
@@ -1,61 +0,0 @@
-[distutils.commands]
-bdist_rpm = setuptools.command.bdist_rpm:bdist_rpm
-rotate = setuptools.command.rotate:rotate
-develop = setuptools.command.develop:develop
-setopt = setuptools.command.setopt:setopt
-build_py = setuptools.command.build_py:build_py
-saveopts = setuptools.command.saveopts:saveopts
-egg_info = setuptools.command.egg_info:egg_info
-register = setuptools.command.register:register
-upload_docs = setuptools.command.upload_docs:upload_docs
-install_egg_info = setuptools.command.install_egg_info:install_egg_info
-alias = setuptools.command.alias:alias
-easy_install = setuptools.command.easy_install:easy_install
-install_scripts = setuptools.command.install_scripts:install_scripts
-bdist_wininst = setuptools.command.bdist_wininst:bdist_wininst
-bdist_egg = setuptools.command.bdist_egg:bdist_egg
-install = setuptools.command.install:install
-test = setuptools.command.test:test
-install_lib = setuptools.command.install_lib:install_lib
-build_ext = setuptools.command.build_ext:build_ext
-sdist = setuptools.command.sdist:sdist
-
-[egg_info.writers]
-dependency_links.txt = setuptools.command.egg_info:overwrite_arg
-requires.txt = setuptools.command.egg_info:write_requirements
-PKG-INFO = setuptools.command.egg_info:write_pkg_info
-eager_resources.txt = setuptools.command.egg_info:overwrite_arg
-top_level.txt = setuptools.command.egg_info:write_toplevel_names
-namespace_packages.txt = setuptools.command.egg_info:overwrite_arg
-entry_points.txt = setuptools.command.egg_info:write_entries
-depends.txt = setuptools.command.egg_info:warn_depends_obsolete
-
-[console_scripts]
-easy_install = setuptools.command.easy_install:main
-easy_install-2.7 = setuptools.command.easy_install:main
-
-[setuptools.file_finders]
-svn_cvs = setuptools.command.sdist:_default_revctrl
-
-[distutils.setup_keywords]
-dependency_links = setuptools.dist:assert_string_list
-entry_points = setuptools.dist:check_entry_points
-extras_require = setuptools.dist:check_extras
-package_data = setuptools.dist:check_package_data
-install_requires = setuptools.dist:check_requirements
-use_2to3 = setuptools.dist:assert_bool
-use_2to3_fixers = setuptools.dist:assert_string_list
-include_package_data = setuptools.dist:assert_bool
-exclude_package_data = setuptools.dist:check_package_data
-namespace_packages = setuptools.dist:check_nsp
-test_suite = setuptools.dist:check_test_suite
-eager_resources = setuptools.dist:assert_string_list
-zip_safe = setuptools.dist:assert_bool
-test_loader = setuptools.dist:check_importable
-packages = setuptools.dist:check_packages
-convert_2to3_doctests = setuptools.dist:assert_string_list
-tests_require = setuptools.dist:check_requirements
-
-[setuptools.installation]
-eggsecutable = setuptools.command.easy_install:bootstrap
-
diff --git a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/EGG-INFO/top_level.txt b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/EGG-INFO/top_level.txt
deleted file mode 100644
index ef77c7c1..00000000
--- a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/EGG-INFO/top_level.txt
+++ /dev/null
@@ -1,4 +0,0 @@
-easy_install
-pkg_resources
-setuptools
-site
diff --git a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/EGG-INFO/zip-safe b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/EGG-INFO/zip-safe
deleted file mode 100644
index 8b137891..00000000
--- a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/EGG-INFO/zip-safe
+++ /dev/null
@@ -1 +0,0 @@
-
diff --git a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/easy_install.py b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/easy_install.py
deleted file mode 100755
index d87e9840..00000000
--- a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/easy_install.py
+++ /dev/null
@@ -1,5 +0,0 @@
-"""Run the EasyInstall command"""
-
-if __name__ == '__main__':
- from setuptools.command.easy_install import main
- main()
diff --git a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/pkg_resources.py b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/pkg_resources.py
deleted file mode 100755
index 30dbc188..00000000
--- a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/pkg_resources.py
+++ /dev/null
@@ -1,2693 +0,0 @@
-"""Package resource API
---------------------
-
-A resource is a logical file contained within a package, or a logical
-subdirectory thereof. The package resource API expects resource names
-to have their path parts separated with ``/``, *not* whatever the local
-path separator is. Do not use os.path operations to manipulate resource
-names being passed into the API.
-
-The package resource API is designed to work with normal filesystem packages,
-.egg files, and unpacked .egg files. It can also work in a limited way with
-.zip files and with custom PEP 302 loaders that support the ``get_data()``
-method.
-"""
-
-import sys, os, zipimport, time, re, imp, types
-from urlparse import urlparse, urlunparse
-
-try:
- frozenset
-except NameError:
- from sets import ImmutableSet as frozenset
-
-# capture these to bypass sandboxing
-from os import utime
-try:
- from os import mkdir, rename, unlink
- WRITE_SUPPORT = True
-except ImportError:
- # no write support, probably under GAE
- WRITE_SUPPORT = False
-
-from os import open as os_open
-from os.path import isdir, split
-
-# This marker is used to simplify the process that checks if the
-# setuptools package was installed by the Setuptools project
-# or by the Distribute project, in case Setuptools creates
-# a distribution with the same version.
-#
-# The bootstrapping script for instance, will check if this
-# attribute is present to decide whether to reinstall the package
-_distribute = True
-
-def _bypass_ensure_directory(name, mode=0777):
- # Sandbox-bypassing version of ensure_directory()
- if not WRITE_SUPPORT:
- raise IOError('"os.mkdir" not supported on this platform.')
- dirname, filename = split(name)
- if dirname and filename and not isdir(dirname):
- _bypass_ensure_directory(dirname)
- mkdir(dirname, mode)
-
-
-
-
-
-
-
-
-def get_supported_platform():
- """Return this platform's maximum compatible version.
-
- distutils.util.get_platform() normally reports the minimum version
- of Mac OS X that would be required to *use* extensions produced by
- distutils. But what we want when checking compatibility is to know the
- version of Mac OS X that we are *running*. To allow usage of packages that
- explicitly require a newer version of Mac OS X, we must also know the
- current version of the OS.
-
- If this condition occurs for any other platform with a version in its
- platform strings, this function should be extended accordingly.
- """
- plat = get_build_platform(); m = macosVersionString.match(plat)
- if m is not None and sys.platform == "darwin":
- try:
- plat = 'macosx-%s-%s' % ('.'.join(_macosx_vers()[:2]), m.group(3))
- except ValueError:
- pass # not Mac OS X
- return plat
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-__all__ = [
- # Basic resource access and distribution/entry point discovery
- 'require', 'run_script', 'get_provider', 'get_distribution',
- 'load_entry_point', 'get_entry_map', 'get_entry_info', 'iter_entry_points',
- 'resource_string', 'resource_stream', 'resource_filename',
- 'resource_listdir', 'resource_exists', 'resource_isdir',
-
- # Environmental control
- 'declare_namespace', 'working_set', 'add_activation_listener',
- 'find_distributions', 'set_extraction_path', 'cleanup_resources',
- 'get_default_cache',
-
- # Primary implementation classes
- 'Environment', 'WorkingSet', 'ResourceManager',
- 'Distribution', 'Requirement', 'EntryPoint',
-
- # Exceptions
- 'ResolutionError','VersionConflict','DistributionNotFound','UnknownExtra',
- 'ExtractionError',
-
- # Parsing functions and string utilities
- 'parse_requirements', 'parse_version', 'safe_name', 'safe_version',
- 'get_platform', 'compatible_platforms', 'yield_lines', 'split_sections',
- 'safe_extra', 'to_filename',
-
- # filesystem utilities
- 'ensure_directory', 'normalize_path',
-
- # Distribution "precedence" constants
- 'EGG_DIST', 'BINARY_DIST', 'SOURCE_DIST', 'CHECKOUT_DIST', 'DEVELOP_DIST',
-
- # "Provider" interfaces, implementations, and registration/lookup APIs
- 'IMetadataProvider', 'IResourceProvider', 'FileMetadata',
- 'PathMetadata', 'EggMetadata', 'EmptyProvider', 'empty_provider',
- 'NullProvider', 'EggProvider', 'DefaultProvider', 'ZipProvider',
- 'register_finder', 'register_namespace_handler', 'register_loader_type',
- 'fixup_namespace_packages', 'get_importer',
-
- # Deprecated/backward compatibility only
- 'run_main', 'AvailableDistributions',
-]
-class ResolutionError(Exception):
- """Abstract base for dependency resolution errors"""
- def __repr__(self):
- return self.__class__.__name__+repr(self.args)
-
-class VersionConflict(ResolutionError):
- """An already-installed version conflicts with the requested version"""
-
-class DistributionNotFound(ResolutionError):
- """A requested distribution was not found"""
-
-class UnknownExtra(ResolutionError):
- """Distribution doesn't have an "extra feature" of the given name"""
-_provider_factories = {}
-
-PY_MAJOR = sys.version[:3]
-EGG_DIST = 3
-BINARY_DIST = 2
-SOURCE_DIST = 1
-CHECKOUT_DIST = 0
-DEVELOP_DIST = -1
-
-def register_loader_type(loader_type, provider_factory):
- """Register `provider_factory` to make providers for `loader_type`
-
- `loader_type` is the type or class of a PEP 302 ``module.__loader__``,
- and `provider_factory` is a function that, passed a *module* object,
- returns an ``IResourceProvider`` for that module.
- """
- _provider_factories[loader_type] = provider_factory
-
-def get_provider(moduleOrReq):
- """Return an IResourceProvider for the named module or requirement"""
- if isinstance(moduleOrReq,Requirement):
- return working_set.find(moduleOrReq) or require(str(moduleOrReq))[0]
- try:
- module = sys.modules[moduleOrReq]
- except KeyError:
- __import__(moduleOrReq)
- module = sys.modules[moduleOrReq]
- loader = getattr(module, '__loader__', None)
- return _find_adapter(_provider_factories, loader)(module)
-
-def _macosx_vers(_cache=[]):
- if not _cache:
- import platform
- version = platform.mac_ver()[0]
- # fallback for MacPorts
- if version == '':
- import plistlib
- plist = '/System/Library/CoreServices/SystemVersion.plist'
- if os.path.exists(plist):
- if hasattr(plistlib, 'readPlist'):
- plist_content = plistlib.readPlist(plist)
- if 'ProductVersion' in plist_content:
- version = plist_content['ProductVersion']
-
- _cache.append(version.split('.'))
- return _cache[0]
-
-def _macosx_arch(machine):
- return {'PowerPC':'ppc', 'Power_Macintosh':'ppc'}.get(machine,machine)
-
-def get_build_platform():
- """Return this platform's string for platform-specific distributions
-
- XXX Currently this is the same as ``distutils.util.get_platform()``, but it
- needs some hacks for Linux and Mac OS X.
- """
- try:
- from distutils.util import get_platform
- except ImportError:
- from sysconfig import get_platform
-
- plat = get_platform()
- if sys.platform == "darwin" and not plat.startswith('macosx-'):
- try:
- version = _macosx_vers()
- machine = os.uname()[4].replace(" ", "_")
- return "macosx-%d.%d-%s" % (int(version[0]), int(version[1]),
- _macosx_arch(machine))
- except ValueError:
- # if someone is running a non-Mac darwin system, this will fall
- # through to the default implementation
- pass
- return plat
-
-macosVersionString = re.compile(r"macosx-(\d+)\.(\d+)-(.*)")
-darwinVersionString = re.compile(r"darwin-(\d+)\.(\d+)\.(\d+)-(.*)")
-get_platform = get_build_platform # XXX backward compat
-
-def compatible_platforms(provided,required):
- """Can code for the `provided` platform run on the `required` platform?
-
- Returns true if either platform is ``None``, or the platforms are equal.
-
- XXX Needs compatibility checks for Linux and other unixy OSes.
- """
- if provided is None or required is None or provided==required:
- return True # easy case
-
- # Mac OS X special cases
- reqMac = macosVersionString.match(required)
- if reqMac:
- provMac = macosVersionString.match(provided)
-
- # is this a Mac package?
- if not provMac:
- # this is backwards compatibility for packages built before
- # setuptools 0.6. All packages built after this point will
- # use the new macosx designation.
- provDarwin = darwinVersionString.match(provided)
- if provDarwin:
- dversion = int(provDarwin.group(1))
- macosversion = "%s.%s" % (reqMac.group(1), reqMac.group(2))
- if dversion == 7 and macosversion >= "10.3" or \
- dversion == 8 and macosversion >= "10.4":
-
- #import warnings
- #warnings.warn("Mac eggs should be rebuilt to "
- # "use the macosx designation instead of darwin.",
- # category=DeprecationWarning)
- return True
- return False # egg isn't macosx or legacy darwin
-
- # are they the same major version and machine type?
- if provMac.group(1) != reqMac.group(1) or \
- provMac.group(3) != reqMac.group(3):
- return False
-
-
-
- # is the required OS major update >= the provided one?
- if int(provMac.group(2)) > int(reqMac.group(2)):
- return False
-
- return True
-
- # XXX Linux and other platforms' special cases should go here
- return False
-
-
-def run_script(dist_spec, script_name):
- """Locate distribution `dist_spec` and run its `script_name` script"""
- ns = sys._getframe(1).f_globals
- name = ns['__name__']
- ns.clear()
- ns['__name__'] = name
- require(dist_spec)[0].run_script(script_name, ns)
-
-run_main = run_script # backward compatibility
-
-def get_distribution(dist):
- """Return a current distribution object for a Requirement or string"""
- if isinstance(dist,basestring): dist = Requirement.parse(dist)
- if isinstance(dist,Requirement): dist = get_provider(dist)
- if not isinstance(dist,Distribution):
- raise TypeError("Expected string, Requirement, or Distribution", dist)
- return dist
-
-def load_entry_point(dist, group, name):
- """Return `name` entry point of `group` for `dist` or raise ImportError"""
- return get_distribution(dist).load_entry_point(group, name)
-
-def get_entry_map(dist, group=None):
- """Return the entry point map for `group`, or the full entry map"""
- return get_distribution(dist).get_entry_map(group)
-
-def get_entry_info(dist, group, name):
- """Return the EntryPoint object for `group`+`name`, or ``None``"""
- return get_distribution(dist).get_entry_info(group, name)
-
-
-class IMetadataProvider:
-
- def has_metadata(name):
- """Does the package's distribution contain the named metadata?"""
-
- def get_metadata(name):
- """The named metadata resource as a string"""
-
- def get_metadata_lines(name):
- """Yield named metadata resource as list of non-blank non-comment lines
-
- Leading and trailing whitespace is stripped from each line, and lines
- with ``#`` as the first non-blank character are omitted."""
-
- def metadata_isdir(name):
- """Is the named metadata a directory? (like ``os.path.isdir()``)"""
-
- def metadata_listdir(name):
- """List of metadata names in the directory (like ``os.listdir()``)"""
-
- def run_script(script_name, namespace):
- """Execute the named script in the supplied namespace dictionary"""
-
-
-
-
-
-
-
-
-
-
-class IResourceProvider(IMetadataProvider):
- """An object that provides access to package resources"""
-
- def get_resource_filename(manager, resource_name):
- """Return a true filesystem path for `resource_name`
-
- `manager` must be an ``IResourceManager``"""
-
- def get_resource_stream(manager, resource_name):
- """Return a readable file-like object for `resource_name`
-
- `manager` must be an ``IResourceManager``"""
-
- def get_resource_string(manager, resource_name):
- """Return a string containing the contents of `resource_name`
-
- `manager` must be an ``IResourceManager``"""
-
- def has_resource(resource_name):
- """Does the package contain the named resource?"""
-
- def resource_isdir(resource_name):
- """Is the named resource a directory? (like ``os.path.isdir()``)"""
-
- def resource_listdir(resource_name):
- """List of resource names in the directory (like ``os.listdir()``)"""
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-class WorkingSet(object):
- """A collection of active distributions on sys.path (or a similar list)"""
-
- def __init__(self, entries=None):
- """Create working set from list of path entries (default=sys.path)"""
- self.entries = []
- self.entry_keys = {}
- self.by_key = {}
- self.callbacks = []
-
- if entries is None:
- entries = sys.path
-
- for entry in entries:
- self.add_entry(entry)
-
-
- def add_entry(self, entry):
- """Add a path item to ``.entries``, finding any distributions on it
-
- ``find_distributions(entry,True)`` is used to find distributions
- corresponding to the path entry, and they are added. `entry` is
- always appended to ``.entries``, even if it is already present.
- (This is because ``sys.path`` can contain the same value more than
- once, and the ``.entries`` of the ``sys.path`` WorkingSet should always
- equal ``sys.path``.)
- """
- self.entry_keys.setdefault(entry, [])
- self.entries.append(entry)
- for dist in find_distributions(entry, True):
- self.add(dist, entry, False)
-
-
- def __contains__(self,dist):
- """True if `dist` is the active distribution for its project"""
- return self.by_key.get(dist.key) == dist
-
-
-
-
-
- def find(self, req):
- """Find a distribution matching requirement `req`
-
- If there is an active distribution for the requested project, this
- returns it as long as it meets the version requirement specified by
- `req`. But, if there is an active distribution for the project and it
- does *not* meet the `req` requirement, ``VersionConflict`` is raised.
- If there is no active distribution for the requested project, ``None``
- is returned.
- """
- dist = self.by_key.get(req.key)
- if dist is not None and dist not in req:
- raise VersionConflict(dist,req) # XXX add more info
- else:
- return dist
-
- def iter_entry_points(self, group, name=None):
- """Yield entry point objects from `group` matching `name`
-
- If `name` is None, yields all entry points in `group` from all
- distributions in the working set, otherwise only ones matching
- both `group` and `name` are yielded (in distribution order).
- """
- for dist in self:
- entries = dist.get_entry_map(group)
- if name is None:
- for ep in entries.values():
- yield ep
- elif name in entries:
- yield entries[name]
-
- def run_script(self, requires, script_name):
- """Locate distribution for `requires` and run `script_name` script"""
- ns = sys._getframe(1).f_globals
- name = ns['__name__']
- ns.clear()
- ns['__name__'] = name
- self.require(requires)[0].run_script(script_name, ns)
-
-
-
- def __iter__(self):
- """Yield distributions for non-duplicate projects in the working set
-
- The yield order is the order in which the items' path entries were
- added to the working set.
- """
- seen = {}
- for item in self.entries:
- for key in self.entry_keys[item]:
- if key not in seen:
- seen[key]=1
- yield self.by_key[key]
-
- def add(self, dist, entry=None, insert=True):
- """Add `dist` to working set, associated with `entry`
-
- If `entry` is unspecified, it defaults to the ``.location`` of `dist`.
- On exit from this routine, `entry` is added to the end of the working
- set's ``.entries`` (if it wasn't already present).
-
- `dist` is only added to the working set if it's for a project that
- doesn't already have a distribution in the set. If it's added, any
- callbacks registered with the ``subscribe()`` method will be called.
- """
- if insert:
- dist.insert_on(self.entries, entry)
-
- if entry is None:
- entry = dist.location
- keys = self.entry_keys.setdefault(entry,[])
- keys2 = self.entry_keys.setdefault(dist.location,[])
- if dist.key in self.by_key:
- return # ignore hidden distros
-
- self.by_key[dist.key] = dist
- if dist.key not in keys:
- keys.append(dist.key)
- if dist.key not in keys2:
- keys2.append(dist.key)
- self._added_new(dist)
-
- def resolve(self, requirements, env=None, installer=None, replacement=True):
- """List all distributions needed to (recursively) meet `requirements`
-
- `requirements` must be a sequence of ``Requirement`` objects. `env`,
- if supplied, should be an ``Environment`` instance. If
- not supplied, it defaults to all distributions available within any
- entry or distribution in the working set. `installer`, if supplied,
- will be invoked with each requirement that cannot be met by an
- already-installed distribution; it should return a ``Distribution`` or
- ``None``.
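-
- Example usage (a minimal sketch; 'FooBar' is a hypothetical project,
- and every requirement is assumed to be locally satisfiable)::
-
- reqs = parse_requirements("FooBar>=1.2")
- for dist in working_set.resolve(reqs):
- working_set.add(dist)  # activate each needed distribution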
- """
-
- requirements = list(requirements)[::-1] # set up the stack
- processed = {} # set of processed requirements
- best = {} # key -> dist
- to_activate = []
-
- while requirements:
- req = requirements.pop(0) # process dependencies breadth-first
- if _override_setuptools(req) and replacement:
- req = Requirement.parse('distribute')
-
- if req in processed:
- # Ignore cyclic or redundant dependencies
- continue
- dist = best.get(req.key)
- if dist is None:
- # Find the best distribution and add it to the map
- dist = self.by_key.get(req.key)
- if dist is None:
- if env is None:
- env = Environment(self.entries)
- dist = best[req.key] = env.best_match(req, self, installer)
- if dist is None:
- #msg = ("The '%s' distribution was not found on this "
- # "system, and is required by this application.")
- #raise DistributionNotFound(msg % req)
-
- # unfortunately, zc.buildout uses a str(err)
- # to get the name of the distribution here..
- raise DistributionNotFound(req)
- to_activate.append(dist)
- if dist not in req:
- # Oops, the "best" so far conflicts with a dependency
- raise VersionConflict(dist,req) # XXX put more info here
- requirements.extend(dist.requires(req.extras)[::-1])
- processed[req] = True
-
- return to_activate # return list of distros to activate
-
- def find_plugins(self,
- plugin_env, full_env=None, installer=None, fallback=True
- ):
- """Find all activatable distributions in `plugin_env`
-
- Example usage::
-
- distributions, errors = working_set.find_plugins(
- Environment(plugin_dirlist)
- )
- map(working_set.add, distributions) # add plugins+libs to sys.path
- print 'Could not load', errors # display errors
-
- The `plugin_env` should be an ``Environment`` instance that contains
- only distributions that are in the project's "plugin directory" or
- directories. The `full_env`, if supplied, should be an ``Environment``
- that contains all currently-available distributions. If `full_env` is not
- supplied, one is created automatically from the ``WorkingSet`` this
- method is called on, which will typically mean that every directory on
- ``sys.path`` will be scanned for distributions.
-
- `installer` is a standard installer callback as used by the
- ``resolve()`` method. The `fallback` flag indicates whether we should
- attempt to resolve older versions of a plugin if the newest version
- cannot be resolved.
-
- This method returns a 2-tuple: (`distributions`, `error_info`), where
- `distributions` is a list of the distributions found in `plugin_env`
- that were loadable, along with any other distributions that are needed
- to resolve their dependencies. `error_info` is a dictionary mapping
- unloadable plugin distributions to an exception instance describing the
- error that occurred. Usually this will be a ``DistributionNotFound`` or
- ``VersionConflict`` instance.
- """
-
- plugin_projects = list(plugin_env)
- plugin_projects.sort() # scan project names in alphabetic order
-
- error_info = {}
- distributions = {}
-
- if full_env is None:
- env = Environment(self.entries)
- env += plugin_env
- else:
- env = full_env + plugin_env
-
- shadow_set = self.__class__([])
- map(shadow_set.add, self) # put all our entries in shadow_set
-
- for project_name in plugin_projects:
-
- for dist in plugin_env[project_name]:
-
- req = [dist.as_requirement()]
-
- try:
- resolvees = shadow_set.resolve(req, env, installer)
-
- except ResolutionError,v:
- error_info[dist] = v # save error info
- if fallback:
- continue # try the next older version of project
- else:
- break # give up on this project, keep going
-
- else:
- map(shadow_set.add, resolvees)
- distributions.update(dict.fromkeys(resolvees))
-
- # success, no need to try any more versions of this project
- break
-
- distributions = list(distributions)
- distributions.sort()
-
- return distributions, error_info
-
- def require(self, *requirements):
- """Ensure that distributions matching `requirements` are activated
-
- `requirements` must be a string or a (possibly-nested) sequence
- thereof, specifying the distributions and versions required. The
- return value is a sequence of the distributions that needed to be
- activated to fulfill the requirements; all relevant distributions are
- included, even if they were already activated in this working set.
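-
- Example usage (a minimal sketch; 'FooBar' is a hypothetical project
- name)::
-
- for dist in working_set.require('FooBar>=1.2'):
- print 'activated', dist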
- """
-
- needed = self.resolve(parse_requirements(requirements))
-
- for dist in needed:
- self.add(dist)
-
- return needed
-
-
- def subscribe(self, callback):
- """Invoke `callback` for all distributions (including existing ones)"""
- if callback in self.callbacks:
- return
- self.callbacks.append(callback)
- for dist in self:
- callback(dist)
-
-
- def _added_new(self, dist):
- for callback in self.callbacks:
- callback(dist)
-
-
-class Environment(object):
- """Searchable snapshot of distributions on a search path"""
-
- def __init__(self, search_path=None, platform=get_supported_platform(), python=PY_MAJOR):
- """Snapshot distributions available on a search path
-
- Any distributions found on `search_path` are added to the environment.
- `search_path` should be a sequence of ``sys.path`` items. If not
- supplied, ``sys.path`` is used.
-
- `platform` is an optional string specifying the name of the platform
- that platform-specific distributions must be compatible with. If
- unspecified, it defaults to the current platform. `python` is an
- optional string naming the desired version of Python (e.g. ``'2.4'``);
- it defaults to the current version.
-
- You may explicitly set `platform` (and/or `python`) to ``None`` if you
- wish to map *all* distributions, not just those compatible with the
- running platform or Python version.
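-
- Example usage (a minimal sketch, scanning the default ``sys.path``)::
-
- env = Environment()
- dists = env['distribute']  # newest-to-oldest list for a project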
- """
- self._distmap = {}
- self._cache = {}
- self.platform = platform
- self.python = python
- self.scan(search_path)
-
- def can_add(self, dist):
- """Is distribution `dist` acceptable for this environment?
-
- The distribution must match the platform and python version
- requirements specified when this environment was created, or False
- is returned.
- """
- return (self.python is None or dist.py_version is None
- or dist.py_version==self.python) \
- and compatible_platforms(dist.platform,self.platform)
-
- def remove(self, dist):
- """Remove `dist` from the environment"""
- self._distmap[dist.key].remove(dist)
-
- def scan(self, search_path=None):
- """Scan `search_path` for distributions usable in this environment
-
- Any distributions found are added to the environment.
- `search_path` should be a sequence of ``sys.path`` items. If not
- supplied, ``sys.path`` is used. Only distributions conforming to
- the platform/python version defined at initialization are added.
- """
- if search_path is None:
- search_path = sys.path
-
- for item in search_path:
- for dist in find_distributions(item):
- self.add(dist)
-
- def __getitem__(self,project_name):
- """Return a newest-to-oldest list of distributions for `project_name`
- """
- try:
- return self._cache[project_name]
- except KeyError:
- project_name = project_name.lower()
- if project_name not in self._distmap:
- return []
-
- if project_name not in self._cache:
- dists = self._cache[project_name] = self._distmap[project_name]
- _sort_dists(dists)
-
- return self._cache[project_name]
-
- def add(self,dist):
- """Add `dist` if we ``can_add()`` it and it isn't already added"""
- if self.can_add(dist) and dist.has_version():
- dists = self._distmap.setdefault(dist.key,[])
- if dist not in dists:
- dists.append(dist)
- if dist.key in self._cache:
- _sort_dists(self._cache[dist.key])
-
-
- def best_match(self, req, working_set, installer=None):
- """Find distribution best matching `req` and usable on `working_set`
-
- This calls the ``find(req)`` method of the `working_set` to see if a
- suitable distribution is already active. (This may raise
- ``VersionConflict`` if an unsuitable version of the project is already
- active in the specified `working_set`.) If a suitable distribution
- isn't active, this method returns the newest distribution in the
- environment that meets the ``Requirement`` in `req`. If no suitable
- distribution is found, and `installer` is supplied, then the result of
- calling the environment's ``obtain(req, installer)`` method will be
- returned.
- """
- dist = working_set.find(req)
- if dist is not None:
- return dist
- for dist in self[req.key]:
- if dist in req:
- return dist
- return self.obtain(req, installer) # try and download/install
-
- def obtain(self, requirement, installer=None):
- """Obtain a distribution matching `requirement` (e.g. via download)
-
- Obtain a distro that matches requirement (e.g. via download). In the
- base ``Environment`` class, this routine just returns
- ``installer(requirement)``, unless `installer` is None, in which case
- None is returned instead. This method is a hook that allows subclasses
- to attempt other ways of obtaining a distribution before falling back
- to the `installer` argument."""
- if installer is not None:
- return installer(requirement)
-
- def __iter__(self):
- """Yield the unique project names of the available distributions"""
- for key in self._distmap.keys():
- if self[key]: yield key
-
- def __iadd__(self, other):
- """In-place addition of a distribution or environment"""
- if isinstance(other,Distribution):
- self.add(other)
- elif isinstance(other,Environment):
- for project in other:
- for dist in other[project]:
- self.add(dist)
- else:
- raise TypeError("Can't add %r to environment" % (other,))
- return self
-
- def __add__(self, other):
- """Add an environment or distribution to an environment"""
- new = self.__class__([], platform=None, python=None)
- for env in self, other:
- new += env
- return new
-
-
-AvailableDistributions = Environment # XXX backward compatibility
-
-
-class ExtractionError(RuntimeError):
- """An error occurred extracting a resource
-
- The following attributes are available from instances of this exception:
-
- manager
- The resource manager that raised this exception
-
- cache_path
- The base directory for resource extraction
-
- original_error
- The exception instance that caused extraction to fail
- """
-
-
-class ResourceManager:
- """Manage resource extraction and packages"""
- extraction_path = None
-
- def __init__(self):
- self.cached_files = {}
-
- def resource_exists(self, package_or_requirement, resource_name):
- """Does the named resource exist?"""
- return get_provider(package_or_requirement).has_resource(resource_name)
-
- def resource_isdir(self, package_or_requirement, resource_name):
- """Is the named resource an existing directory?"""
- return get_provider(package_or_requirement).resource_isdir(
- resource_name
- )
-
- def resource_filename(self, package_or_requirement, resource_name):
- """Return a true filesystem path for specified resource"""
- return get_provider(package_or_requirement).get_resource_filename(
- self, resource_name
- )
-
- def resource_stream(self, package_or_requirement, resource_name):
- """Return a readable file-like object for specified resource"""
- return get_provider(package_or_requirement).get_resource_stream(
- self, resource_name
- )
-
- def resource_string(self, package_or_requirement, resource_name):
- """Return specified resource as a string"""
- return get_provider(package_or_requirement).get_resource_string(
- self, resource_name
- )
-
- def resource_listdir(self, package_or_requirement, resource_name):
- """List the contents of the named resource directory"""
- return get_provider(package_or_requirement).resource_listdir(
- resource_name
- )
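-
- # Example usage of the resource API above (a minimal sketch; 'mypkg'
- # and the resource path are hypothetical). The same methods are also
- # exposed as module-level functions by _initialize(globals()) at the
- # bottom of this module:
- #
- # data = resource_string('mypkg', 'data/template.html')
- # path = resource_filename('mypkg', 'data/template.html')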
-
- def extraction_error(self):
- """Give an error message for problems extracting file(s)"""
-
- old_exc = sys.exc_info()[1]
- cache_path = self.extraction_path or get_default_cache()
-
- err = ExtractionError("""Can't extract file(s) to egg cache
-
-The following error occurred while trying to extract file(s) to the Python egg
-cache:
-
- %s
-
-The Python egg cache directory is currently set to:
-
- %s
-
-Perhaps your account does not have write access to this directory? You can
-change the cache directory by setting the PYTHON_EGG_CACHE environment
-variable to point to an accessible directory.
-""" % (old_exc, cache_path)
- )
- err.manager = self
- err.cache_path = cache_path
- err.original_error = old_exc
- raise err
-
- def get_cache_path(self, archive_name, names=()):
- """Return absolute location in cache for `archive_name` and `names`
-
- The parent directory of the resulting path will be created if it does
- not already exist. `archive_name` should be the base filename of the
- enclosing egg (which may not be the name of the enclosing zipfile!),
- including its ".egg" extension. `names`, if provided, should be a
- sequence of path name parts "under" the egg's extraction location.
-
- This method should only be called by resource providers that need to
- obtain an extraction location, and only for names they intend to
- extract, as it tracks the generated names for possible cleanup later.
- """
- extract_path = self.extraction_path or get_default_cache()
- target_path = os.path.join(extract_path, archive_name+'-tmp', *names)
- try:
- _bypass_ensure_directory(target_path)
- except:
- self.extraction_error()
-
- self.cached_files[target_path] = 1
- return target_path
-
- def postprocess(self, tempname, filename):
- """Perform any platform-specific postprocessing of `tempname`
-
- This is where Mac header rewrites should be done; other platforms don't
- have anything special they should do.
-
- Resource providers should call this method ONLY after successfully
- extracting a compressed resource. They must NOT call it on resources
- that are already in the filesystem.
-
- `tempname` is the current (temporary) name of the file, and `filename`
- is the name it will be renamed to by the caller after this routine
- returns.
- """
-
- if os.name == 'posix':
- # Make the resource executable
- mode = ((os.stat(tempname).st_mode) | 0555) & 07777
- os.chmod(tempname, mode)
-
- def set_extraction_path(self, path):
- """Set the base path where resources will be extracted to, if needed.
-
- If you do not call this routine before any extractions take place, the
- path defaults to the return value of ``get_default_cache()``. (Which
- is based on the ``PYTHON_EGG_CACHE`` environment variable, with various
- platform-specific fallbacks. See that routine's documentation for more
- details.)
-
- Resources are extracted to subdirectories of this path based upon
- information given by the ``IResourceProvider``. You may set this to a
- temporary directory, but then you must call ``cleanup_resources()`` to
- delete the extracted files when done. There is no guarantee that
- ``cleanup_resources()`` will be able to remove all extracted files.
-
- (Note: you may not change the extraction path for a given resource
- manager once resources have been extracted, unless you first call
- ``cleanup_resources()``.)
- """
- if self.cached_files:
- raise ValueError(
- "Can't change extraction path, files already extracted"
- )
-
- self.extraction_path = path
-
- def cleanup_resources(self, force=False):
- """
- Delete all extracted resource files and directories, returning a list
- of the file and directory names that could not be successfully removed.
- This function does not have any concurrency protection, so it should
- generally only be called when the extraction path is a temporary
- directory exclusive to a single process. This method is not
- automatically called; you must call it explicitly or register it as an
- ``atexit`` function if you wish to ensure cleanup of a temporary
- directory used for extractions.
- """
- # XXX
-
-
-
-def get_default_cache():
- """Determine the default cache location
-
- This returns the ``PYTHON_EGG_CACHE`` environment variable, if set.
- Otherwise, on Windows, it returns a "Python-Eggs" subdirectory of the
- "Application Data" directory. On all other systems, it's "~/.python-eggs".
- """
- try:
- return os.environ['PYTHON_EGG_CACHE']
- except KeyError:
- pass
-
- if os.name!='nt':
- return os.path.expanduser('~/.python-eggs')
-
- app_data = 'Application Data' # XXX this may be locale-specific!
- app_homes = [
- (('APPDATA',), None), # best option, should be locale-safe
- (('USERPROFILE',), app_data),
- (('HOMEDRIVE','HOMEPATH'), app_data),
- (('HOMEPATH',), app_data),
- (('HOME',), None),
- (('WINDIR',), app_data), # 95/98/ME
- ]
-
- for keys, subdir in app_homes:
- dirname = ''
- for key in keys:
- if key in os.environ:
- dirname = os.path.join(dirname, os.environ[key])
- else:
- break
- else:
- if subdir:
- dirname = os.path.join(dirname,subdir)
- return os.path.join(dirname, 'Python-Eggs')
- else:
- raise RuntimeError(
- "Please set the PYTHON_EGG_CACHE enviroment variable"
- )
-
-def safe_name(name):
- """Convert an arbitrary string to a standard distribution name
-
- Any runs of non-alphanumeric/. characters are replaced with a single '-'.
- """
- return re.sub('[^A-Za-z0-9.]+', '-', name)
-
-
-def safe_version(version):
- """Convert an arbitrary string to a standard version string
-
- Spaces become dots, and all other non-alphanumeric characters become
- dashes, with runs of multiple dashes condensed to a single dash.
- """
- version = version.replace(' ','.')
- return re.sub('[^A-Za-z0-9.]+', '-', version)
-
-
-def safe_extra(extra):
- """Convert an arbitrary string to a standard 'extra' name
-
- Any runs of non-alphanumeric/. characters are replaced with a single '_',
- and the result is always lowercased.
- """
- return re.sub('[^A-Za-z0-9.]+', '_', extra).lower()
-
-
-def to_filename(name):
- """Convert a project or version name to its filename-escaped form
-
- Any '-' characters are currently replaced with '_'.
- """
- return name.replace('-','_')
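-
- # Worked examples for the name-mangling helpers above (a sketch;
- # the inputs are hypothetical):
- #
- # safe_name('My Project!') == 'My-Project-'
- # safe_version('1.0 beta 2') == '1.0.beta.2'
- # safe_extra('Foo Bar') == 'foo_bar'
- # to_filename('My-Project') == 'My_Project'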
-
-
-class NullProvider:
- """Try to implement resources and metadata for arbitrary PEP 302 loaders"""
-
- egg_name = None
- egg_info = None
- loader = None
-
- def __init__(self, module):
- self.loader = getattr(module, '__loader__', None)
- self.module_path = os.path.dirname(getattr(module, '__file__', ''))
-
- def get_resource_filename(self, manager, resource_name):
- return self._fn(self.module_path, resource_name)
-
- def get_resource_stream(self, manager, resource_name):
- return StringIO(self.get_resource_string(manager, resource_name))
-
- def get_resource_string(self, manager, resource_name):
- return self._get(self._fn(self.module_path, resource_name))
-
- def has_resource(self, resource_name):
- return self._has(self._fn(self.module_path, resource_name))
-
- def has_metadata(self, name):
- return self.egg_info and self._has(self._fn(self.egg_info,name))
-
- if sys.version_info <= (3,):
- def get_metadata(self, name):
- if not self.egg_info:
- return ""
- return self._get(self._fn(self.egg_info,name))
- else:
- def get_metadata(self, name):
- if not self.egg_info:
- return ""
- return self._get(self._fn(self.egg_info,name)).decode("utf-8")
-
- def get_metadata_lines(self, name):
- return yield_lines(self.get_metadata(name))
-
- def resource_isdir(self,resource_name):
- return self._isdir(self._fn(self.module_path, resource_name))
-
- def metadata_isdir(self,name):
- return self.egg_info and self._isdir(self._fn(self.egg_info,name))
-
-
- def resource_listdir(self,resource_name):
- return self._listdir(self._fn(self.module_path,resource_name))
-
- def metadata_listdir(self,name):
- if self.egg_info:
- return self._listdir(self._fn(self.egg_info,name))
- return []
-
- def run_script(self,script_name,namespace):
- script = 'scripts/'+script_name
- if not self.has_metadata(script):
- raise ResolutionError("No script named %r" % script_name)
- script_text = self.get_metadata(script).replace('\r\n','\n')
- script_text = script_text.replace('\r','\n')
- script_filename = self._fn(self.egg_info,script)
- namespace['__file__'] = script_filename
- if os.path.exists(script_filename):
- execfile(script_filename, namespace, namespace)
- else:
- from linecache import cache
- cache[script_filename] = (
- len(script_text), 0, script_text.split('\n'), script_filename
- )
- script_code = compile(script_text,script_filename,'exec')
- exec script_code in namespace, namespace
-
- def _has(self, path):
- raise NotImplementedError(
- "Can't perform this operation for unregistered loader type"
- )
-
- def _isdir(self, path):
- raise NotImplementedError(
- "Can't perform this operation for unregistered loader type"
- )
-
- def _listdir(self, path):
- raise NotImplementedError(
- "Can't perform this operation for unregistered loader type"
- )
-
- def _fn(self, base, resource_name):
- if resource_name:
- return os.path.join(base, *resource_name.split('/'))
- return base
-
- def _get(self, path):
- if hasattr(self.loader, 'get_data'):
- return self.loader.get_data(path)
- raise NotImplementedError(
- "Can't perform this operation for loaders without 'get_data()'"
- )
-
-register_loader_type(object, NullProvider)
-
-
-class EggProvider(NullProvider):
- """Provider based on a virtual filesystem"""
-
- def __init__(self,module):
- NullProvider.__init__(self,module)
- self._setup_prefix()
-
- def _setup_prefix(self):
- # we assume here that our metadata may be nested inside a "basket"
- # of multiple eggs; that's why we use module_path instead of .archive
- path = self.module_path
- old = None
- while path!=old:
- if path.lower().endswith('.egg'):
- self.egg_name = os.path.basename(path)
- self.egg_info = os.path.join(path, 'EGG-INFO')
- self.egg_root = path
- break
- old = path
- path, base = os.path.split(path)
-
-
-class DefaultProvider(EggProvider):
- """Provides access to package resources in the filesystem"""
-
- def _has(self, path):
- return os.path.exists(path)
-
- def _isdir(self,path):
- return os.path.isdir(path)
-
- def _listdir(self,path):
- return os.listdir(path)
-
- def get_resource_stream(self, manager, resource_name):
- return open(self._fn(self.module_path, resource_name), 'rb')
-
- def _get(self, path):
- stream = open(path, 'rb')
- try:
- return stream.read()
- finally:
- stream.close()
-
-register_loader_type(type(None), DefaultProvider)
-
-
-class EmptyProvider(NullProvider):
- """Provider that returns nothing for all requests"""
-
- _isdir = _has = lambda self,path: False
- _get = lambda self,path: ''
- _listdir = lambda self,path: []
- module_path = None
-
- def __init__(self):
- pass
-
-empty_provider = EmptyProvider()
-
-
-class ZipProvider(EggProvider):
- """Resource support for zips and eggs"""
-
- eagers = None
-
- def __init__(self, module):
- EggProvider.__init__(self,module)
- self.zipinfo = zipimport._zip_directory_cache[self.loader.archive]
- self.zip_pre = self.loader.archive+os.sep
-
- def _zipinfo_name(self, fspath):
- # Convert a virtual filename (full path to file) into a zipfile subpath
- # usable with the zipimport directory cache for our target archive
- if fspath.startswith(self.zip_pre):
- return fspath[len(self.zip_pre):]
- raise AssertionError(
- "%s is not a subpath of %s" % (fspath,self.zip_pre)
- )
-
- def _parts(self,zip_path):
- # Convert a zipfile subpath into an egg-relative path part list
- fspath = self.zip_pre+zip_path # pseudo-fs path
- if fspath.startswith(self.egg_root+os.sep):
- return fspath[len(self.egg_root)+1:].split(os.sep)
- raise AssertionError(
- "%s is not a subpath of %s" % (fspath,self.egg_root)
- )
-
- def get_resource_filename(self, manager, resource_name):
- if not self.egg_name:
- raise NotImplementedError(
- "resource_filename() only supported for .egg, not .zip"
- )
- # no need to lock for extraction, since we use temp names
- zip_path = self._resource_to_zip(resource_name)
- eagers = self._get_eager_resources()
- if '/'.join(self._parts(zip_path)) in eagers:
- for name in eagers:
- self._extract_resource(manager, self._eager_to_zip(name))
- return self._extract_resource(manager, zip_path)
-
- def _extract_resource(self, manager, zip_path):
-
- if zip_path in self._index():
- for name in self._index()[zip_path]:
- last = self._extract_resource(
- manager, os.path.join(zip_path, name)
- )
- return os.path.dirname(last) # return the extracted directory name
-
- zip_stat = self.zipinfo[zip_path]
- t,d,size = zip_stat[5], zip_stat[6], zip_stat[3]
- date_time = (
- (d>>9)+1980, (d>>5)&0xF, d&0x1F, # ymd
- (t&0xFFFF)>>11, (t>>5)&0x3F, (t&0x1F) * 2, 0, 0, -1 # hms, etc.
- )
- timestamp = time.mktime(date_time)
-
- try:
- if not WRITE_SUPPORT:
- raise IOError('"os.rename" and "os.unlink" are not supported '
- 'on this platform')
-
- real_path = manager.get_cache_path(
- self.egg_name, self._parts(zip_path)
- )
-
- if os.path.isfile(real_path):
- stat = os.stat(real_path)
- if stat.st_size==size and stat.st_mtime==timestamp:
- # size and stamp match, don't bother extracting
- return real_path
-
- outf, tmpnam = _mkstemp(".$extract", dir=os.path.dirname(real_path))
- os.write(outf, self.loader.get_data(zip_path))
- os.close(outf)
- utime(tmpnam, (timestamp,timestamp))
- manager.postprocess(tmpnam, real_path)
-
- try:
- rename(tmpnam, real_path)
-
- except os.error:
- if os.path.isfile(real_path):
- stat = os.stat(real_path)
-
- if stat.st_size==size and stat.st_mtime==timestamp:
- # size and stamp match, somebody did it just ahead of
- # us, so we're done
- return real_path
- elif os.name=='nt': # Windows, del old file and retry
- unlink(real_path)
- rename(tmpnam, real_path)
- return real_path
- raise
-
- except os.error:
- manager.extraction_error() # report a user-friendly error
-
- return real_path
-
- def _get_eager_resources(self):
- if self.eagers is None:
- eagers = []
- for name in ('native_libs.txt', 'eager_resources.txt'):
- if self.has_metadata(name):
- eagers.extend(self.get_metadata_lines(name))
- self.eagers = eagers
- return self.eagers
-
- def _index(self):
- try:
- return self._dirindex
- except AttributeError:
- ind = {}
- for path in self.zipinfo:
- parts = path.split(os.sep)
- while parts:
- parent = os.sep.join(parts[:-1])
- if parent in ind:
- ind[parent].append(parts[-1])
- break
- else:
- ind[parent] = [parts.pop()]
- self._dirindex = ind
- return ind
-
- def _has(self, fspath):
- zip_path = self._zipinfo_name(fspath)
- return zip_path in self.zipinfo or zip_path in self._index()
-
- def _isdir(self,fspath):
- return self._zipinfo_name(fspath) in self._index()
-
- def _listdir(self,fspath):
- return list(self._index().get(self._zipinfo_name(fspath), ()))
-
- def _eager_to_zip(self,resource_name):
- return self._zipinfo_name(self._fn(self.egg_root,resource_name))
-
- def _resource_to_zip(self,resource_name):
- return self._zipinfo_name(self._fn(self.module_path,resource_name))
-
-register_loader_type(zipimport.zipimporter, ZipProvider)
-
-
-class FileMetadata(EmptyProvider):
- """Metadata handler for standalone PKG-INFO files
-
- Usage::
-
- metadata = FileMetadata("/path/to/PKG-INFO")
-
- This provider rejects all data and metadata requests except for PKG-INFO,
- which is treated as existing, and will be the contents of the file at
- the provided location.
- """
-
- def __init__(self,path):
- self.path = path
-
- def has_metadata(self,name):
- return name=='PKG-INFO'
-
- def get_metadata(self,name):
- if name=='PKG-INFO':
- f = open(self.path,'rU')
- metadata = f.read()
- f.close()
- return metadata
- raise KeyError("No metadata except PKG-INFO is available")
-
- def get_metadata_lines(self,name):
- return yield_lines(self.get_metadata(name))
-
-
-class PathMetadata(DefaultProvider):
- """Metadata provider for egg directories
-
- Usage::
-
- # Development eggs:
-
- egg_info = "/path/to/PackageName.egg-info"
- base_dir = os.path.dirname(egg_info)
- metadata = PathMetadata(base_dir, egg_info)
- dist_name = os.path.splitext(os.path.basename(egg_info))[0]
- dist = Distribution(basedir,project_name=dist_name,metadata=metadata)
-
- # Unpacked egg directories:
-
- egg_path = "/path/to/PackageName-ver-pyver-etc.egg"
- metadata = PathMetadata(egg_path, os.path.join(egg_path,'EGG-INFO'))
- dist = Distribution.from_filename(egg_path, metadata=metadata)
- """
-
- def __init__(self, path, egg_info):
- self.module_path = path
- self.egg_info = egg_info
-
-
-class EggMetadata(ZipProvider):
- """Metadata provider for .egg files"""
-
- def __init__(self, importer):
- """Create a metadata provider from a zipimporter"""
-
- self.zipinfo = zipimport._zip_directory_cache[importer.archive]
- self.zip_pre = importer.archive+os.sep
- self.loader = importer
- if importer.prefix:
- self.module_path = os.path.join(importer.archive, importer.prefix)
- else:
- self.module_path = importer.archive
- self._setup_prefix()
-
-
-class ImpWrapper:
- """PEP 302 Importer that wraps Python's "normal" import algorithm"""
-
- def __init__(self, path=None):
- self.path = path
-
- def find_module(self, fullname, path=None):
- subname = fullname.split(".")[-1]
- if subname != fullname and self.path is None:
- return None
- if self.path is None:
- path = None
- else:
- path = [self.path]
- try:
- file, filename, etc = imp.find_module(subname, path)
- except ImportError:
- return None
- return ImpLoader(file, filename, etc)
-
-
-class ImpLoader:
- """PEP 302 Loader that wraps Python's "normal" import algorithm"""
-
- def __init__(self, file, filename, etc):
- self.file = file
- self.filename = filename
- self.etc = etc
-
- def load_module(self, fullname):
- try:
- mod = imp.load_module(fullname, self.file, self.filename, self.etc)
- finally:
- if self.file: self.file.close()
- # Note: we don't set __loader__ because we want the module to look
- # normal; i.e. this is just a wrapper for standard import machinery
- return mod
-
-
-def get_importer(path_item):
- """Retrieve a PEP 302 "importer" for the given path item
-
- If there is no importer, this returns a wrapper around the builtin import
- machinery. The returned importer is only cached if it was created by a
- path hook.
- """
- try:
- importer = sys.path_importer_cache[path_item]
- except KeyError:
- for hook in sys.path_hooks:
- try:
- importer = hook(path_item)
- except ImportError:
- pass
- else:
- break
- else:
- importer = None
-
- sys.path_importer_cache.setdefault(path_item,importer)
- if importer is None:
- try:
- importer = ImpWrapper(path_item)
- except ImportError:
- pass
- return importer
-
-try:
- from pkgutil import get_importer, ImpImporter
-except ImportError:
- pass # Python 2.3 or 2.4, use our own implementation
-else:
- ImpWrapper = ImpImporter # Python 2.5, use pkgutil's implementation
- del ImpLoader, ImpImporter
-
-
-_distribution_finders = {}
-
-def register_finder(importer_type, distribution_finder):
- """Register `distribution_finder` to find distributions in sys.path items
-
- `importer_type` is the type or class of a PEP 302 "Importer" (sys.path item
- handler), and `distribution_finder` is a callable that, passed a path
- item and the importer instance, yields ``Distribution`` instances found on
- that path item. See ``pkg_resources.find_on_path`` for an example."""
- _distribution_finders[importer_type] = distribution_finder
-
-
-def find_distributions(path_item, only=False):
- """Yield distributions accessible via `path_item`"""
- importer = get_importer(path_item)
- finder = _find_adapter(_distribution_finders, importer)
- return finder(importer, path_item, only)
-
-def find_in_zip(importer, path_item, only=False):
- metadata = EggMetadata(importer)
- if metadata.has_metadata('PKG-INFO'):
- yield Distribution.from_filename(path_item, metadata=metadata)
- if only:
- return # don't yield nested distros
- for subitem in metadata.resource_listdir('/'):
- if subitem.endswith('.egg'):
- subpath = os.path.join(path_item, subitem)
- for dist in find_in_zip(zipimport.zipimporter(subpath), subpath):
- yield dist
-
-register_finder(zipimport.zipimporter, find_in_zip)
-
-def StringIO(*args, **kw):
- """Thunk to load the real StringIO on demand"""
- global StringIO
- try:
- from cStringIO import StringIO
- except ImportError:
- from StringIO import StringIO
- return StringIO(*args,**kw)
-
-def find_nothing(importer, path_item, only=False):
- return ()
-register_finder(object,find_nothing)
-
-def find_on_path(importer, path_item, only=False):
- """Yield distributions accessible on a sys.path directory"""
- path_item = _normalize_cached(path_item)
-
- if os.path.isdir(path_item) and os.access(path_item, os.R_OK):
- if path_item.lower().endswith('.egg'):
- # unpacked egg
- yield Distribution.from_filename(
- path_item, metadata=PathMetadata(
- path_item, os.path.join(path_item,'EGG-INFO')
- )
- )
- else:
- # scan for .egg and .egg-info in directory
- for entry in os.listdir(path_item):
- lower = entry.lower()
- if lower.endswith('.egg-info'):
- fullpath = os.path.join(path_item, entry)
- if os.path.isdir(fullpath):
- # egg-info directory, allow getting metadata
- metadata = PathMetadata(path_item, fullpath)
- else:
- metadata = FileMetadata(fullpath)
- yield Distribution.from_location(
- path_item,entry,metadata,precedence=DEVELOP_DIST
- )
- elif not only and lower.endswith('.egg'):
- for dist in find_distributions(os.path.join(path_item, entry)):
- yield dist
- elif not only and lower.endswith('.egg-link'):
- for line in open(os.path.join(path_item, entry)):
- if not line.strip(): continue
- for item in find_distributions(os.path.join(path_item,line.rstrip())):
- yield item
- break
-register_finder(ImpWrapper,find_on_path)
-
-_namespace_handlers = {}
-_namespace_packages = {}
-
-def register_namespace_handler(importer_type, namespace_handler):
- """Register `namespace_handler` to declare namespace packages
-
- `importer_type` is the type or class of a PEP 302 "Importer" (sys.path item
- handler), and `namespace_handler` is a callable like this::
-
- def namespace_handler(importer,path_entry,moduleName,module):
- # return a path_entry to use for child packages
-
- Namespace handlers are only called if the importer object has already
- agreed that it can handle the relevant path item, and they should only
- return a subpath if the module __path__ does not already contain an
- equivalent subpath. For an example namespace handler, see
- ``pkg_resources.file_ns_handler``.
- """
- _namespace_handlers[importer_type] = namespace_handler
-
-def _handle_ns(packageName, path_item):
- """Ensure that named package includes a subpath of path_item (if needed)"""
- importer = get_importer(path_item)
- if importer is None:
- return None
- loader = importer.find_module(packageName)
- if loader is None:
- return None
- module = sys.modules.get(packageName)
- if module is None:
- module = sys.modules[packageName] = types.ModuleType(packageName)
- module.__path__ = []; _set_parent_ns(packageName)
- elif not hasattr(module,'__path__'):
- raise TypeError("Not a package:", packageName)
- handler = _find_adapter(_namespace_handlers, importer)
- subpath = handler(importer,path_item,packageName,module)
- if subpath is not None:
- path = module.__path__; path.append(subpath)
- loader.load_module(packageName); module.__path__ = path
- return subpath
-
-def declare_namespace(packageName):
- """Declare that package 'packageName' is a namespace package"""
-
- imp.acquire_lock()
- try:
- if packageName in _namespace_packages:
- return
-
- path, parent = sys.path, None
- if '.' in packageName:
- parent = '.'.join(packageName.split('.')[:-1])
- declare_namespace(parent)
- __import__(parent)
- try:
- path = sys.modules[parent].__path__
- except AttributeError:
- raise TypeError("Not a package:", parent)
-
- # Track what packages are namespaces, so when new path items are added,
- # they can be updated
- _namespace_packages.setdefault(parent,[]).append(packageName)
- _namespace_packages.setdefault(packageName,[])
-
- for path_item in path:
- # Ensure all the parent's path items are reflected in the child,
- # if they apply
- _handle_ns(packageName, path_item)
-
- finally:
- imp.release_lock()
-
-def fixup_namespace_packages(path_item, parent=None):
- """Ensure that previously-declared namespace packages include path_item"""
- imp.acquire_lock()
- try:
- for package in _namespace_packages.get(parent,()):
- subpath = _handle_ns(package, path_item)
- if subpath: fixup_namespace_packages(subpath,package)
- finally:
- imp.release_lock()
-
-def file_ns_handler(importer, path_item, packageName, module):
- """Compute an ns-package subpath for a filesystem or zipfile importer"""
-
- subpath = os.path.join(path_item, packageName.split('.')[-1])
- normalized = _normalize_cached(subpath)
- for item in module.__path__:
- if _normalize_cached(item)==normalized:
- break
- else:
- # Only return the path if it's not already there
- return subpath
-
-register_namespace_handler(ImpWrapper,file_ns_handler)
-register_namespace_handler(zipimport.zipimporter,file_ns_handler)
-
-
-def null_ns_handler(importer, path_item, packageName, module):
- return None
-
-register_namespace_handler(object,null_ns_handler)
-
-
-def normalize_path(filename):
- """Normalize a file/dir name for comparison purposes"""
- return os.path.normcase(os.path.realpath(filename))
-
-def _normalize_cached(filename,_cache={}):
- try:
- return _cache[filename]
- except KeyError:
- _cache[filename] = result = normalize_path(filename)
- return result
-
-def _set_parent_ns(packageName):
- parts = packageName.split('.')
- name = parts.pop()
- if parts:
- parent = '.'.join(parts)
- setattr(sys.modules[parent], name, sys.modules[packageName])
-
-
-def yield_lines(strs):
- """Yield non-empty/non-comment lines of a ``basestring`` or sequence"""
- if isinstance(strs,basestring):
- for s in strs.splitlines():
- s = s.strip()
- if s and not s.startswith('#'): # skip blank lines/comments
- yield s
- else:
- for ss in strs:
- for s in yield_lines(ss):
- yield s
-
-LINE_END = re.compile(r"\s*(#.*)?$").match # whitespace and comment
-CONTINUE = re.compile(r"\s*\\\s*(#.*)?$").match # line continuation
-DISTRO = re.compile(r"\s*((\w|[-.])+)").match # Distribution or extra
-VERSION = re.compile(r"\s*(<=?|>=?|==|!=)\s*((\w|[-.])+)").match # ver. info
-COMMA = re.compile(r"\s*,").match # comma between items
-OBRACKET = re.compile(r"\s*\[").match
-CBRACKET = re.compile(r"\s*\]").match
-MODULE = re.compile(r"\w+(\.\w+)*$").match
-EGG_NAME = re.compile(
- r"(?P<name>[^-]+)"
- r"( -(?P<ver>[^-]+) (-py(?P<pyver>[^-]+) (-(?P<plat>.+))? )? )?",
- re.VERBOSE | re.IGNORECASE
-).match
-
-component_re = re.compile(r'(\d+ | [a-z]+ | \.| -)', re.VERBOSE)
-replace = {'pre':'c', 'preview':'c','-':'final-','rc':'c','dev':'@'}.get
-
-def _parse_version_parts(s):
- for part in component_re.split(s):
- part = replace(part,part)
- if not part or part=='.':
- continue
- if part[:1] in '0123456789':
- yield part.zfill(8) # pad for numeric comparison
- else:
- yield '*'+part
-
- yield '*final' # ensure that alpha/beta/candidate are before final
-
-def parse_version(s):
- """Convert a version string to a chronologically-sortable key
-
- This is a rough cross between distutils' StrictVersion and LooseVersion;
- if you give it versions that would work with StrictVersion, then it behaves
- the same; otherwise it acts like a slightly-smarter LooseVersion. It is
- *possible* to create pathological version coding schemes that will fool
- this parser, but they should be very rare in practice.
-
- The returned value will be a tuple of strings. Numeric portions of the
- version are padded to 8 digits so they will compare numerically, but
- without relying on how numbers compare relative to strings. Dots are
- dropped, but dashes are retained. Trailing zeros between alpha segments
- or dashes are suppressed, so that e.g. "2.4.0" is considered the same as
- "2.4". Alphanumeric parts are lower-cased.
-
- The algorithm assumes that strings like "-" and any alpha string that
- alphabetically follows "final" represent a "patch level". So, "2.4-1"
- is assumed to be a branch or patch of "2.4", and therefore "2.4.1" is
- considered newer than "2.4-1", which in turn is newer than "2.4".
-
- Strings like "a", "b", "c", "alpha", "beta", "candidate" and so on (that
- come before "final" alphabetically) are assumed to be pre-release versions,
- so that the version "2.4" is considered newer than "2.4a1".
-
- Finally, to handle miscellaneous cases, the strings "pre", "preview", and
- "rc" are treated as if they were "c", i.e. as though they were release
- candidates, and therefore are not as new as a version string that does not
- contain them, and "dev" is replaced with an '@' so that it sorts lower than
- any other pre-release tag.
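-
- Example usage (a worked sketch of the resulting ordering)::
-
- parse_version('2.4') > parse_version('2.4a1')    # True
- parse_version('2.4.1') > parse_version('2.4-1')  # True
- parse_version('2.4-1') > parse_version('2.4')    # True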
- """
- parts = []
- for part in _parse_version_parts(s.lower()):
- if part.startswith('*'):
- if part<'*final': # remove '-' before a prerelease tag
- while parts and parts[-1]=='*final-': parts.pop()
- # remove trailing zeros from each series of numeric parts
- while parts and parts[-1]=='00000000':
- parts.pop()
- parts.append(part)
- return tuple(parts)
-
-class EntryPoint(object):
- """Object representing an advertised importable object"""
-
- def __init__(self, name, module_name, attrs=(), extras=(), dist=None):
- if not MODULE(module_name):
- raise ValueError("Invalid module name", module_name)
- self.name = name
- self.module_name = module_name
- self.attrs = tuple(attrs)
- self.extras = Requirement.parse(("x[%s]" % ','.join(extras))).extras
- self.dist = dist
-
- def __str__(self):
- s = "%s = %s" % (self.name, self.module_name)
- if self.attrs:
- s += ':' + '.'.join(self.attrs)
- if self.extras:
- s += ' [%s]' % ','.join(self.extras)
- return s
-
- def __repr__(self):
- return "EntryPoint.parse(%r)" % str(self)
-
- def load(self, require=True, env=None, installer=None):
- if require: self.require(env, installer)
- entry = __import__(self.module_name, globals(),globals(), ['__name__'])
- for attr in self.attrs:
- try:
- entry = getattr(entry,attr)
- except AttributeError:
- raise ImportError("%r has no %r attribute" % (entry,attr))
- return entry
-
- def require(self, env=None, installer=None):
- if self.extras and not self.dist:
- raise UnknownExtra("Can't require() without a distribution", self)
- map(working_set.add,
- working_set.resolve(self.dist.requires(self.extras),env,installer))
-
-
-
- #@classmethod
- def parse(cls, src, dist=None):
- """Parse a single entry point from string `src`
-
- Entry point syntax follows the form::
-
- name = some.module:some.attr [extra1,extra2]
-
- The entry name and module name are required, but the ``:attrs`` and
- ``[extras]`` parts are optional
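-
- Example usage (a minimal sketch; the names are hypothetical)::
-
- ep = EntryPoint.parse("foo = mypkg.mod:main [extra1]")
- print ep.name, ep.module_name, ep.attrs, ep.extras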
- """
- try:
- attrs = extras = ()
- name,value = src.split('=',1)
- if '[' in value:
- value,extras = value.split('[',1)
- req = Requirement.parse("x["+extras)
- if req.specs: raise ValueError
- extras = req.extras
- if ':' in value:
- value,attrs = value.split(':',1)
- if not MODULE(attrs.rstrip()):
- raise ValueError
- attrs = attrs.rstrip().split('.')
- except ValueError:
- raise ValueError(
- "EntryPoint must be in 'name=module:attrs [extras]' format",
- src
- )
- else:
- return cls(name.strip(), value.strip(), attrs, extras, dist)
-
- parse = classmethod(parse)
-
- #@classmethod
- def parse_group(cls, group, lines, dist=None):
- """Parse an entry point group"""
- if not MODULE(group):
- raise ValueError("Invalid group name", group)
- this = {}
- for line in yield_lines(lines):
- ep = cls.parse(line, dist)
- if ep.name in this:
- raise ValueError("Duplicate entry point", group, ep.name)
- this[ep.name]=ep
- return this
-
- parse_group = classmethod(parse_group)
-
- #@classmethod
- def parse_map(cls, data, dist=None):
- """Parse a map of entry point groups"""
- if isinstance(data,dict):
- data = data.items()
- else:
- data = split_sections(data)
- maps = {}
- for group, lines in data:
- if group is None:
- if not lines:
- continue
- raise ValueError("Entry points must be listed in groups")
- group = group.strip()
- if group in maps:
- raise ValueError("Duplicate group name", group)
- maps[group] = cls.parse_group(group, lines, dist)
- return maps
-
- parse_map = classmethod(parse_map)
-
-
-def _remove_md5_fragment(location):
- if not location:
- return ''
- parsed = urlparse(location)
- if parsed[-1].startswith('md5='):
- return urlunparse(parsed[:-1] + ('',))
- return location
-
-
-class Distribution(object):
- """Wrap an actual or potential sys.path entry w/metadata"""
- def __init__(self,
- location=None, metadata=None, project_name=None, version=None,
- py_version=PY_MAJOR, platform=None, precedence = EGG_DIST
- ):
- self.project_name = safe_name(project_name or 'Unknown')
- if version is not None:
- self._version = safe_version(version)
- self.py_version = py_version
- self.platform = platform
- self.location = location
- self.precedence = precedence
- self._provider = metadata or empty_provider
-
- #@classmethod
- def from_location(cls,location,basename,metadata=None,**kw):
- project_name, version, py_version, platform = [None]*4
- basename, ext = os.path.splitext(basename)
- if ext.lower() in (".egg",".egg-info"):
- match = EGG_NAME(basename)
- if match:
- project_name, version, py_version, platform = match.group(
- 'name','ver','pyver','plat'
- )
- return cls(
- location, metadata, project_name=project_name, version=version,
- py_version=py_version, platform=platform, **kw
- )
- from_location = classmethod(from_location)
-
-
- hashcmp = property(
- lambda self: (
- getattr(self,'parsed_version',()),
- self.precedence,
- self.key,
- _remove_md5_fragment(self.location),
- self.py_version,
- self.platform
- )
- )
- def __hash__(self): return hash(self.hashcmp)
- def __lt__(self, other):
- return self.hashcmp < other.hashcmp
- def __le__(self, other):
- return self.hashcmp <= other.hashcmp
- def __gt__(self, other):
- return self.hashcmp > other.hashcmp
- def __ge__(self, other):
- return self.hashcmp >= other.hashcmp
- def __eq__(self, other):
- if not isinstance(other, self.__class__):
- # It's not a Distribution, so they are not equal
- return False
- return self.hashcmp == other.hashcmp
- def __ne__(self, other):
- return not self == other
-
- # These properties have to be lazy so that we don't have to load any
- # metadata until/unless it's actually needed. (i.e., some distributions
- # may not know their name or version without loading PKG-INFO)
-
- #@property
- def key(self):
- try:
- return self._key
- except AttributeError:
- self._key = key = self.project_name.lower()
- return key
- key = property(key)
-
- #@property
- def parsed_version(self):
- try:
- return self._parsed_version
- except AttributeError:
- self._parsed_version = pv = parse_version(self.version)
- return pv
-
- parsed_version = property(parsed_version)
-
- #@property
- def version(self):
- try:
- return self._version
- except AttributeError:
- for line in self._get_metadata('PKG-INFO'):
- if line.lower().startswith('version:'):
- self._version = safe_version(line.split(':',1)[1].strip())
- return self._version
- else:
- raise ValueError(
- "Missing 'Version:' header and/or PKG-INFO file", self
- )
- version = property(version)
-
-
-
-
- #@property
- def _dep_map(self):
- try:
- return self.__dep_map
- except AttributeError:
- dm = self.__dep_map = {None: []}
- for name in 'requires.txt', 'depends.txt':
- for extra,reqs in split_sections(self._get_metadata(name)):
- if extra: extra = safe_extra(extra)
- dm.setdefault(extra,[]).extend(parse_requirements(reqs))
- return dm
- _dep_map = property(_dep_map)
-
- def requires(self,extras=()):
- """List of Requirements needed for this distro if `extras` are used"""
- dm = self._dep_map
- deps = []
- deps.extend(dm.get(None,()))
- for ext in extras:
- try:
- deps.extend(dm[safe_extra(ext)])
- except KeyError:
- raise UnknownExtra(
- "%s has no such extra feature %r" % (self, ext)
- )
- return deps
-
- def _get_metadata(self,name):
- if self.has_metadata(name):
- for line in self.get_metadata_lines(name):
- yield line
-
- def activate(self,path=None):
- """Ensure distribution is importable on `path` (default=sys.path)"""
- if path is None: path = sys.path
- self.insert_on(path)
- if path is sys.path:
- fixup_namespace_packages(self.location)
- map(declare_namespace, self._get_metadata('namespace_packages.txt'))
-
-
- def egg_name(self):
- """Return what this distribution's standard .egg filename should be"""
- filename = "%s-%s-py%s" % (
- to_filename(self.project_name), to_filename(self.version),
- self.py_version or PY_MAJOR
- )
-
- if self.platform:
- filename += '-'+self.platform
- return filename
-
- def __repr__(self):
- if self.location:
- return "%s (%s)" % (self,self.location)
- else:
- return str(self)
-
- def __str__(self):
- try: version = getattr(self,'version',None)
- except ValueError: version = None
- version = version or "[unknown version]"
- return "%s %s" % (self.project_name,version)
-
- def __getattr__(self,attr):
- """Delegate all unrecognized public attributes to .metadata provider"""
- if attr.startswith('_'):
- raise AttributeError,attr
- return getattr(self._provider, attr)
-
- #@classmethod
- def from_filename(cls,filename,metadata=None, **kw):
- return cls.from_location(
- _normalize_cached(filename), os.path.basename(filename), metadata,
- **kw
- )
- from_filename = classmethod(from_filename)
-
- def as_requirement(self):
- """Return a ``Requirement`` that matches this distribution exactly"""
- return Requirement.parse('%s==%s' % (self.project_name, self.version))
-
- def load_entry_point(self, group, name):
- """Return the `name` entry point of `group` or raise ImportError"""
- ep = self.get_entry_info(group,name)
- if ep is None:
- raise ImportError("Entry point %r not found" % ((group,name),))
- return ep.load()
-
- def get_entry_map(self, group=None):
- """Return the entry point map for `group`, or the full entry map"""
- try:
- ep_map = self._ep_map
- except AttributeError:
- ep_map = self._ep_map = EntryPoint.parse_map(
- self._get_metadata('entry_points.txt'), self
- )
- if group is not None:
- return ep_map.get(group,{})
- return ep_map
-
- def get_entry_info(self, group, name):
- """Return the EntryPoint object for `group`+`name`, or ``None``"""
- return self.get_entry_map(group).get(name)
-
-
- def insert_on(self, path, loc = None):
- """Insert self.location in path before its nearest parent directory"""
-
- loc = loc or self.location
-
- if self.project_name == 'setuptools':
- try:
- version = self.version
- except ValueError:
- version = ''
- if '0.7' in version:
- raise ValueError(
- "A 0.7-series setuptools cannot be installed "
- "with distribute. Found one at %s" % str(self.location))
-
- if not loc:
- return
-
- if path is sys.path:
- self.check_version_conflict()
-
- nloc = _normalize_cached(loc)
- bdir = os.path.dirname(nloc)
- npath= map(_normalize_cached, path)
-
- bp = None
- for p, item in enumerate(npath):
- if item==nloc:
- break
- elif item==bdir and self.precedence==EGG_DIST:
- # if it's an .egg, give it precedence over its directory
- path.insert(p, loc)
- npath.insert(p, nloc)
- break
- else:
- path.append(loc)
- return
-
- # p is the spot where we found or inserted loc; now remove duplicates
- while 1:
- try:
- np = npath.index(nloc, p+1)
- except ValueError:
- break
- else:
- del npath[np], path[np]
- p = np # ha!
-
- return
-
-
-
- def check_version_conflict(self):
- if self.key=='distribute':
- return # ignore the inevitable setuptools self-conflicts :(
-
- nsp = dict.fromkeys(self._get_metadata('namespace_packages.txt'))
- loc = normalize_path(self.location)
- for modname in self._get_metadata('top_level.txt'):
- if (modname not in sys.modules or modname in nsp
- or modname in _namespace_packages
- ):
- continue
- if modname in ('pkg_resources', 'setuptools', 'site'):
- continue
- fn = getattr(sys.modules[modname], '__file__', None)
- if fn and (normalize_path(fn).startswith(loc) or
- fn.startswith(self.location)):
- continue
- issue_warning(
- "Module %s was already imported from %s, but %s is being added"
- " to sys.path" % (modname, fn, self.location),
- )
-
- def has_version(self):
- try:
- self.version
- except ValueError:
- issue_warning("Unbuilt egg for "+repr(self))
- return False
- return True
-
- def clone(self,**kw):
- """Copy this distribution, substituting in any changed keyword args"""
- for attr in (
- 'project_name', 'version', 'py_version', 'platform', 'location',
- 'precedence'
- ):
- kw.setdefault(attr, getattr(self,attr,None))
- kw.setdefault('metadata', self._provider)
- return self.__class__(**kw)
-
- #@property
- def extras(self):
- return [dep for dep in self._dep_map if dep]
- extras = property(extras)
-
-
-def issue_warning(*args,**kw):
- level = 1
- g = globals()
- try:
- # find the first stack frame that is *not* code in
- # the pkg_resources module, to use for the warning
- while sys._getframe(level).f_globals is g:
- level += 1
- except ValueError:
- pass
- from warnings import warn
- warn(stacklevel = level+1, *args, **kw)
-
-
-def parse_requirements(strs):
- """Yield ``Requirement`` objects for each specification in `strs`
-
- `strs` must be an instance of ``basestring``, or a (possibly-nested)
- iterable thereof.
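-
- Example usage (a minimal sketch; 'FooBar' is a hypothetical
- project)::
-
- for req in parse_requirements("FooBar[extra1]>=1.2,<2.0"):
- print req.project_name, req.extras, req.specs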
- """
- # create a steppable iterator, so we can handle \-continuations
- lines = iter(yield_lines(strs))
-
- def scan_list(ITEM,TERMINATOR,line,p,groups,item_name):
-
- items = []
-
- while not TERMINATOR(line,p):
- if CONTINUE(line,p):
- try:
- line = lines.next(); p = 0
- except StopIteration:
- raise ValueError(
- "\\ must not appear on the last nonblank line"
- )
-
- match = ITEM(line,p)
- if not match:
- raise ValueError("Expected "+item_name+" in",line,"at",line[p:])
-
- items.append(match.group(*groups))
- p = match.end()
-
- match = COMMA(line,p)
- if match:
- p = match.end() # skip the comma
- elif not TERMINATOR(line,p):
- raise ValueError(
- "Expected ',' or end-of-list in",line,"at",line[p:]
- )
-
- match = TERMINATOR(line,p)
- if match: p = match.end() # skip the terminator, if any
- return line, p, items
-
- for line in lines:
- match = DISTRO(line)
- if not match:
- raise ValueError("Missing distribution spec", line)
- project_name = match.group(1)
- p = match.end()
- extras = []
-
- match = OBRACKET(line,p)
- if match:
- p = match.end()
- line, p, extras = scan_list(
- DISTRO, CBRACKET, line, p, (1,), "'extra' name"
- )
-
- line, p, specs = scan_list(VERSION,LINE_END,line,p,(1,2),"version spec")
- specs = [(op,safe_version(val)) for op,val in specs]
- yield Requirement(project_name, specs, extras)
-
-
-def _sort_dists(dists):
- tmp = [(dist.hashcmp,dist) for dist in dists]
- tmp.sort()
- dists[::-1] = [d for hc,d in tmp]
-
-
-class Requirement:
- def __init__(self, project_name, specs, extras):
- """DO NOT CALL THIS UNDOCUMENTED METHOD; use Requirement.parse()!"""
- self.unsafe_name, project_name = project_name, safe_name(project_name)
- self.project_name, self.key = project_name, project_name.lower()
- index = [(parse_version(v),state_machine[op],op,v) for op,v in specs]
- index.sort()
- self.specs = [(op,ver) for parsed,trans,op,ver in index]
- self.index, self.extras = index, tuple(map(safe_extra,extras))
- self.hashCmp = (
- self.key, tuple([(op,parsed) for parsed,trans,op,ver in index]),
- frozenset(self.extras)
- )
- self.__hash = hash(self.hashCmp)
-
- def __str__(self):
- specs = ','.join([''.join(s) for s in self.specs])
- extras = ','.join(self.extras)
- if extras: extras = '[%s]' % extras
- return '%s%s%s' % (self.project_name, extras, specs)
-
- def __eq__(self,other):
- return isinstance(other,Requirement) and self.hashCmp==other.hashCmp
-
- def __contains__(self,item):
- if isinstance(item,Distribution):
- if item.key != self.key: return False
- if self.index: item = item.parsed_version # only get if we need it
- elif isinstance(item,basestring):
- item = parse_version(item)
- last = None
- compare = lambda a, b: (a > b) - (a < b) # -1, 0, 1
- for parsed,trans,op,ver in self.index:
- action = trans[compare(item,parsed)] # Indexing: 0, 1, -1
- if action=='F': return False
- elif action=='T': return True
- elif action=='+': last = True
- elif action=='-' or last is None: last = False
- if last is None: last = True # no rules encountered
- return last
-
-
- def __hash__(self):
- return self.__hash
-
- def __repr__(self): return "Requirement.parse(%r)" % str(self)
-
- #@staticmethod
- def parse(s, replacement=True):
- reqs = list(parse_requirements(s))
- if reqs:
- if len(reqs) == 1:
- found_req = reqs[0]
- # if setuptools was asked for and distribute is
- # installed, we want to give distribute instead
- if _override_setuptools(found_req) and replacement:
- distribute = list(parse_requirements('distribute'))
- if len(distribute) == 1:
- return distribute[0]
- return found_req
-
- raise ValueError("Expected only one requirement", s)
- raise ValueError("No requirements found", s)
-
- parse = staticmethod(parse)
-
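A quick sketch of the resulting API (values hypothetical): Requirement.parse builds a single Requirement, and membership testing accepts either a Distribution or a bare version string:

    >>> req = Requirement.parse("foo>=1.2,<2.0")
    >>> '1.4' in req, '2.0' in req
    (True, False)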
-state_machine = {
- # =><
- '<' : '--T',
- '<=': 'T-T',
- '>' : 'F+F',
- '>=': 'T+F',
- '==': 'T..',
- '!=': 'F++',
-}
-
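Each state_machine entry is read column-wise: positions 0, 1 and -1 give the action when the candidate version compares equal to, greater than, or less than the spec version. 'T' and 'F' terminate immediately with True or False, while '+' and '-' record a provisional pass or fail that a later rule may override. For example, testing '1.4' against '>=1.2' compares greater, hits '+' in 'T+F', and passes provisionally until the remaining rules are checked.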
-
-def _override_setuptools(req):
- """Return True when distribute wants to override a setuptools dependency.
-
- We want to override when the requirement is setuptools and the version is
- a variant of 0.6.
-
- """
- if req.project_name == 'setuptools':
- if not len(req.specs):
- # Just setuptools: ok
- return True
- for comparator, version in req.specs:
- if comparator in ['==', '>=', '>']:
- if '0.7' in version:
- # We want some setuptools not from the 0.6 series.
- return False
- return True
- return False
-
-
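A sketch of the effect: a request for 0.6-series setuptools is silently answered with distribute, while an explicit 0.7-or-later request is left alone:

    >>> str(Requirement.parse('setuptools==0.6c11'))
    'distribute'
    >>> str(Requirement.parse('setuptools>=0.7'))
    'setuptools>=0.7'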
-def _get_mro(cls):
- """Get an mro for a type or classic class"""
- if not isinstance(cls,type):
- class cls(cls,object): pass
- return cls.__mro__[1:]
- return cls.__mro__
-
-def _find_adapter(registry, ob):
- """Return an adapter factory for `ob` from `registry`"""
- for t in _get_mro(getattr(ob, '__class__', type(ob))):
- if t in registry:
- return registry[t]
-
-
-def ensure_directory(path):
- """Ensure that the parent directory of `path` exists"""
- dirname = os.path.dirname(path)
- if not os.path.isdir(dirname):
- os.makedirs(dirname)
-
-def split_sections(s):
- """Split a string or iterable thereof into (section,content) pairs
-
- Each ``section`` is a stripped version of the section header ("[section]")
- and each ``content`` is a list of stripped lines excluding blank lines and
- comment-only lines. If there are any such lines before the first section
- header, they're returned in a first ``section`` of ``None``.
- """
- section = None
- content = []
- for line in yield_lines(s):
- if line.startswith("["):
- if line.endswith("]"):
- if section or content:
- yield section, content
- section = line[1:-1].strip()
- content = []
- else:
- raise ValueError("Invalid section heading", line)
- else:
- content.append(line)
-
- # wrap up last segment
- yield section, content
-
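For illustration (input hypothetical), lines before any header land under a None section:

    >>> list(split_sections("one\n[sec]\ntwo\nthree"))
    [(None, ['one']), ('sec', ['two', 'three'])]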
-def _mkstemp(*args,**kw):
- from tempfile import mkstemp
- old_open = os.open
- try:
- os.open = os_open # temporarily bypass sandboxing
- return mkstemp(*args,**kw)
- finally:
- os.open = old_open # and then put it back
-
-
-# Set up global resource manager
-_manager = ResourceManager()
-def _initialize(g):
- for name in dir(_manager):
- if not name.startswith('_'):
- g[name] = getattr(_manager, name)
-_initialize(globals())
-
-# Prepare the master working set and make the ``require()`` API available
-working_set = WorkingSet()
-try:
- # Does the main program list any requirements?
- from __main__ import __requires__
-except ImportError:
- pass # No: just use the default working set based on sys.path
-else:
- # Yes: ensure the requirements are met, by prefixing sys.path if necessary
- try:
- working_set.require(__requires__)
- except VersionConflict: # try it without defaults already on sys.path
- working_set = WorkingSet([]) # by starting with an empty path
- for dist in working_set.resolve(
- parse_requirements(__requires__), Environment()
- ):
- working_set.add(dist)
- for entry in sys.path: # add any missing entries from sys.path
- if entry not in working_set.entries:
- working_set.add_entry(entry)
- sys.path[:] = working_set.entries # then copy back to sys.path
-
-require = working_set.require
-iter_entry_points = working_set.iter_entry_points
-add_activation_listener = working_set.subscribe
-run_script = working_set.run_script
-run_main = run_script # backward compatibility
-# Activate all distributions already on sys.path, and ensure that
-# all distributions added to the working set in the future (e.g. by
-# calling ``require()``) will get activated as well.
-add_activation_listener(lambda dist: dist.activate())
-working_set.entries=[]; map(working_set.add_entry,sys.path) # match order
-
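A minimal sketch of the module-level API this sets up (project name hypothetical):

    import pkg_resources
    pkg_resources.require('FooProject>=1.2')   # activate a matching distribution
    for ep in pkg_resources.iter_entry_points('console_scripts'):
        print ep.name                          # Python 2 print, as in this module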
diff --git a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/__init__.py b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/__init__.py
deleted file mode 100755
index 9de373f9..00000000
--- a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/__init__.py
+++ /dev/null
@@ -1,104 +0,0 @@
-"""Extensions to the 'distutils' for large or complex distributions"""
-from setuptools.extension import Extension, Library
-from setuptools.dist import Distribution, Feature, _get_unpatched
-import distutils.core, setuptools.command
-from setuptools.depends import Require
-from distutils.core import Command as _Command
-from distutils.util import convert_path
-import os
-import sys
-
-__version__ = '0.6'
-__all__ = [
- 'setup', 'Distribution', 'Feature', 'Command', 'Extension', 'Require',
- 'find_packages'
-]
-
-# This marker is used to simplify the process that checks whether the
-# setuptools package was installed by the Setuptools project
-# or by the Distribute project, in case Setuptools creates
-# a distribution with the same version.
-#
-# The distribute_setup script, for instance, will check if this
-# attribute is present to decide whether to reinstall the package
-# or not.
-_distribute = True
-
-bootstrap_install_from = None
-
-# If we run 2to3 on .py files, should we also convert docstrings?
-# Default: yes; assume that we can detect doctests reliably
-run_2to3_on_doctests = True
-# Standard package names for fixer packages
-lib2to3_fixer_packages = ['lib2to3.fixes']
-
-def find_packages(where='.', exclude=()):
- """Return a list all Python packages found within directory 'where'
-
- 'where' should be supplied as a "cross-platform" (i.e. URL-style) path; it
- will be converted to the appropriate local path syntax. 'exclude' is a
- sequence of package names to exclude; '*' can be used as a wildcard in the
- names, such that 'foo.*' will exclude all subpackages of 'foo' (but not
- 'foo' itself).
- """
- out = []
- stack=[(convert_path(where), '')]
- while stack:
- where,prefix = stack.pop(0)
- for name in os.listdir(where):
- fn = os.path.join(where,name)
- if ('.' not in name and os.path.isdir(fn) and
- os.path.isfile(os.path.join(fn,'__init__.py'))
- ):
- out.append(prefix+name); stack.append((fn,prefix+name+'.'))
- for pat in list(exclude)+['ez_setup', 'distribute_setup']:
- from fnmatch import fnmatchcase
- out = [item for item in out if not fnmatchcase(item,pat)]
- return out
-
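A usage sketch, assuming a tree with foo/__init__.py, foo/bar/__init__.py and foo/tests/__init__.py (directory order may vary):

    >>> find_packages('.', exclude=['foo.tests'])
    ['foo', 'foo.bar']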
-setup = distutils.core.setup
-
-_Command = _get_unpatched(_Command)
-
-class Command(_Command):
- __doc__ = _Command.__doc__
-
- command_consumes_arguments = False
-
- def __init__(self, dist, **kw):
- # Add support for keyword arguments
- _Command.__init__(self,dist)
- for k,v in kw.items():
- setattr(self,k,v)
-
- def reinitialize_command(self, command, reinit_subcommands=0, **kw):
- cmd = _Command.reinitialize_command(self, command, reinit_subcommands)
- for k,v in kw.items():
- setattr(cmd,k,v) # update command with keywords
- return cmd
-
-import distutils.core
-distutils.core.Command = Command # we can't patch distutils.cmd, alas
-
-def findall(dir = os.curdir):
- """Find all files under 'dir' and return the list of full filenames
- (relative to 'dir').
- """
- all_files = []
- for base, dirs, files in os.walk(dir):
- if base==os.curdir or base.startswith(os.curdir+os.sep):
- base = base[2:]
- if base:
- files = [os.path.join(base, f) for f in files]
- all_files.extend(filter(os.path.isfile, files))
- return all_files
-
-import distutils.filelist
-distutils.filelist.findall = findall # fix findall bug in distutils.
-
-# sys.dont_write_bytecode was introduced in Python 2.6.
-if ((hasattr(sys, "dont_write_bytecode") and sys.dont_write_bytecode) or
- (not hasattr(sys, "dont_write_bytecode") and os.environ.get("PYTHONDONTWRITEBYTECODE"))):
- _dont_write_bytecode = True
-else:
- _dont_write_bytecode = False
diff --git a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/archive_util.py b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/archive_util.py
deleted file mode 100755
index ab786f3d..00000000
--- a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/archive_util.py
+++ /dev/null
@@ -1,208 +0,0 @@
-"""Utilities for extracting common archive formats"""
-
-
-__all__ = [
- "unpack_archive", "unpack_zipfile", "unpack_tarfile", "default_filter",
- "UnrecognizedFormat", "extraction_drivers", "unpack_directory",
-]
-
-import zipfile, tarfile, os, shutil
-from pkg_resources import ensure_directory
-from distutils.errors import DistutilsError
-
-class UnrecognizedFormat(DistutilsError):
- """Couldn't recognize the archive type"""
-
-def default_filter(src,dst):
- """The default progress/filter callback; returns True for all files"""
- return dst
-
-
-
-def unpack_archive(filename, extract_dir, progress_filter=default_filter,
- drivers=None
-):
- """Unpack `filename` to `extract_dir`, or raise ``UnrecognizedFormat``
-
- `progress_filter` is a function taking two arguments: a source path
- internal to the archive ('/'-separated), and a filesystem path where it
- will be extracted. The callback must return the desired extract path
- (which may be the same as the one passed in), or else ``None`` to skip
- that file or directory. The callback can thus be used to report on the
- progress of the extraction, as well as to filter the items extracted or
- alter their extraction paths.
-
- `drivers`, if supplied, must be a non-empty sequence of functions with the
- same signature as this function (minus the `drivers` argument), that raise
- ``UnrecognizedFormat`` if they do not support extracting the designated
- archive type. The `drivers` are tried in sequence until one is found that
- does not raise an error, or until all are exhausted (in which case
- ``UnrecognizedFormat`` is raised). If you do not supply a sequence of
- drivers, the module's ``extraction_drivers`` constant will be used, which
- means that ``unpack_zipfile`` and ``unpack_tarfile`` will be tried, in that
- order.
- """
- for driver in drivers or extraction_drivers:
- try:
- driver(filename, extract_dir, progress_filter)
- except UnrecognizedFormat:
- continue
- else:
- return
- else:
- raise UnrecognizedFormat(
- "Not a recognized archive type: %s" % filename
- )
-
-
-
-
-
-
-
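A sketch of a custom progress_filter (paths hypothetical): returning None skips a member, returning the path extracts it there:

    def skip_docs(src, dst):
        if src.startswith('docs/'):
            return None          # skip this archive member
        print 'extracting', src  # Python 2 print, matching this module
        return dst

    unpack_archive('dist/foo-1.0.egg', 'build/unpacked', skip_docs)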
-def unpack_directory(filename, extract_dir, progress_filter=default_filter):
- """"Unpack" a directory, using the same interface as for archives
-
- Raises ``UnrecognizedFormat`` if `filename` is not a directory
- """
- if not os.path.isdir(filename):
- raise UnrecognizedFormat("%s is not a directory" % (filename,))
-
- paths = {filename:('',extract_dir)}
- for base, dirs, files in os.walk(filename):
- src,dst = paths[base]
- for d in dirs:
- paths[os.path.join(base,d)] = src+d+'/', os.path.join(dst,d)
- for f in files:
- name = src+f
- target = os.path.join(dst,f)
- target = progress_filter(src+f, target)
- if not target:
- continue # skip non-files
- ensure_directory(target)
- f = os.path.join(base,f)
- shutil.copyfile(f, target)
- shutil.copystat(f, target)
-
-
-def unpack_zipfile(filename, extract_dir, progress_filter=default_filter):
- """Unpack zip `filename` to `extract_dir`
-
- Raises ``UnrecognizedFormat`` if `filename` is not a zipfile (as determined
- by ``zipfile.is_zipfile()``). See ``unpack_archive()`` for an explanation
- of the `progress_filter` argument.
- """
-
- if not zipfile.is_zipfile(filename):
- raise UnrecognizedFormat("%s is not a zip file" % (filename,))
-
- z = zipfile.ZipFile(filename)
- try:
- for info in z.infolist():
- name = info.filename
-
- # don't extract absolute paths or ones with .. in them
- if name.startswith('/') or '..' in name:
- continue
-
- target = os.path.join(extract_dir, *name.split('/'))
- target = progress_filter(name, target)
- if not target:
- continue
- if name.endswith('/'):
- # directory
- ensure_directory(target)
- else:
- # file
- ensure_directory(target)
- data = z.read(info.filename)
- f = open(target,'wb')
- try:
- f.write(data)
- finally:
- f.close()
- del data
- finally:
- z.close()
-
-
-def unpack_tarfile(filename, extract_dir, progress_filter=default_filter):
- """Unpack tar/tar.gz/tar.bz2 `filename` to `extract_dir`
-
- Raises ``UnrecognizedFormat`` if `filename` is not a tarfile (as determined
- by ``tarfile.open()``). See ``unpack_archive()`` for an explanation
- of the `progress_filter` argument.
- """
-
- try:
- tarobj = tarfile.open(filename)
- except tarfile.TarError:
- raise UnrecognizedFormat(
- "%s is not a compressed or uncompressed tar file" % (filename,)
- )
-
- try:
- tarobj.chown = lambda *args: None # don't do any chowning!
- for member in tarobj:
- if member.isfile() or member.isdir():
- name = member.name
- # don't extract absolute paths or ones with .. in them
- if not name.startswith('/') and '..' not in name:
- dst = os.path.join(extract_dir, *name.split('/'))
- dst = progress_filter(name, dst)
- if dst:
- if dst.endswith(os.sep):
- dst = dst[:-1]
- try:
- tarobj._extract_member(member,dst) # XXX Ugh
- except tarfile.ExtractError:
- pass # chown/chmod/mkfifo/mknode/makedev failed
- return True
- finally:
- tarobj.close()
-
-
-
-
-extraction_drivers = unpack_directory, unpack_zipfile, unpack_tarfile
-
-
-
-
-
diff --git a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/cli.exe b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/cli.exe
deleted file mode 100644
index 8906ff77..00000000
--- a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/cli.exe
+++ /dev/null
Binary files differ
diff --git a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/__init__.py b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/__init__.py
deleted file mode 100755
index 152406b3..00000000
--- a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/__init__.py
+++ /dev/null
@@ -1,22 +0,0 @@
-__all__ = [
- 'alias', 'bdist_egg', 'bdist_rpm', 'build_ext', 'build_py', 'develop',
- 'easy_install', 'egg_info', 'install', 'install_lib', 'rotate', 'saveopts',
- 'sdist', 'setopt', 'test', 'upload', 'install_egg_info', 'install_scripts',
- 'register', 'bdist_wininst', 'upload_docs',
-]
-
-from setuptools.command import install_scripts
-import sys
-
-if sys.version>='2.5':
- # In Python 2.5 and above, distutils includes its own upload command
- __all__.remove('upload')
-
-from distutils.command.bdist import bdist
-
-
-if 'egg' not in bdist.format_commands:
- bdist.format_command['egg'] = ('bdist_egg', "Python .egg file")
- bdist.format_commands.append('egg')
-
-del bdist, sys
diff --git a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/alias.py b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/alias.py
deleted file mode 100755
index f5368b29..00000000
--- a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/alias.py
+++ /dev/null
@@ -1,82 +0,0 @@
-import distutils, os
-from setuptools import Command
-from distutils.util import convert_path
-from distutils import log
-from distutils.errors import *
-from setuptools.command.setopt import edit_config, option_base, config_file
-
-def shquote(arg):
- """Quote an argument for later parsing by shlex.split()"""
- for c in '"', "'", "\\", "#":
- if c in arg: return repr(arg)
- if arg.split()<>[arg]:
- return repr(arg)
- return arg
-
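For illustration: single-word arguments pass through, anything with whitespace or shell metacharacters comes back repr()-quoted for shlex.split():

    >>> shquote('foo'), shquote('hello world')
    ('foo', "'hello world'")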
-
-class alias(option_base):
- """Define a shortcut that invokes one or more commands"""
-
- description = "define a shortcut to invoke one or more commands"
- command_consumes_arguments = True
-
- user_options = [
- ('remove', 'r', 'remove (unset) the alias'),
- ] + option_base.user_options
-
- boolean_options = option_base.boolean_options + ['remove']
-
- def initialize_options(self):
- option_base.initialize_options(self)
- self.args = None
- self.remove = None
-
- def finalize_options(self):
- option_base.finalize_options(self)
- if self.remove and len(self.args)<>1:
- raise DistutilsOptionError(
- "Must specify exactly one argument (the alias name) when "
- "using --remove"
- )
-
- def run(self):
- aliases = self.distribution.get_option_dict('aliases')
-
- if not self.args:
- print "Command Aliases"
- print "---------------"
- for alias in aliases:
- print "setup.py alias", format_alias(alias, aliases)
- return
-
- elif len(self.args)==1:
- alias, = self.args
- if self.remove:
- command = None
- elif alias in aliases:
- print "setup.py alias", format_alias(alias, aliases)
- return
- else:
- print "No alias definition found for %r" % alias
- return
- else:
- alias = self.args[0]
- command = ' '.join(map(shquote,self.args[1:]))
-
- edit_config(self.filename, {'aliases': {alias:command}}, self.dry_run)
-
-
-def format_alias(name, aliases):
- source, command = aliases[name]
- if source == config_file('global'):
- source = '--global-config '
- elif source == config_file('user'):
- source = '--user-config '
- elif source == config_file('local'):
- source = ''
- else:
- source = '--filename=%r' % source
- return source+name+' '+command
-
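A usage sketch (command names hypothetical): after

    python setup.py alias daily egg_info --tag-build=dev bdist_egg

running "python setup.py daily" expands to the aliased commands, and "python setup.py alias" alone lists the current definitions.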
-
-
diff --git a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/bdist_egg.py b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/bdist_egg.py
deleted file mode 100755
index 90e1f525..00000000
--- a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/bdist_egg.py
+++ /dev/null
@@ -1,540 +0,0 @@
-"""setuptools.command.bdist_egg
-
-Build .egg distributions"""
-
-# This module should be kept compatible with Python 2.3
-import sys, os, marshal
-from setuptools import Command
-from distutils.dir_util import remove_tree, mkpath
-try:
- from distutils.sysconfig import get_python_version, get_python_lib
-except ImportError:
- from sysconfig import get_python_version
- from distutils.sysconfig import get_python_lib
-
-from distutils import log
-from distutils.errors import DistutilsSetupError
-from pkg_resources import get_build_platform, Distribution, ensure_directory
-from pkg_resources import EntryPoint
-from types import CodeType
-from setuptools.extension import Library
-
-def strip_module(filename):
- if '.' in filename:
- filename = os.path.splitext(filename)[0]
- if filename.endswith('module'):
- filename = filename[:-6]
- return filename
-
-def write_stub(resource, pyfile):
- f = open(pyfile,'w')
- f.write('\n'.join([
- "def __bootstrap__():",
- " global __bootstrap__, __loader__, __file__",
- " import sys, pkg_resources, imp",
- " __file__ = pkg_resources.resource_filename(__name__,%r)"
- % resource,
- " __loader__ = None; del __bootstrap__, __loader__",
- " imp.load_dynamic(__name__,__file__)",
- "__bootstrap__()",
- "" # terminal \n
- ]))
- f.close()
-
-# stub __init__.py for packages distributed without one
-NS_PKG_STUB = '__import__("pkg_resources").declare_namespace(__name__)'
-
-class bdist_egg(Command):
-
- description = "create an \"egg\" distribution"
-
- user_options = [
- ('bdist-dir=', 'b',
- "temporary directory for creating the distribution"),
- ('plat-name=', 'p',
- "platform name to embed in generated filenames "
- "(default: %s)" % get_build_platform()),
- ('exclude-source-files', None,
- "remove all .py files from the generated egg"),
- ('keep-temp', 'k',
- "keep the pseudo-installation tree around after " +
- "creating the distribution archive"),
- ('dist-dir=', 'd',
- "directory to put final built distributions in"),
- ('skip-build', None,
- "skip rebuilding everything (for testing/debugging)"),
- ]
-
- boolean_options = [
- 'keep-temp', 'skip-build', 'exclude-source-files'
- ]
-
-
-
- def initialize_options (self):
- self.bdist_dir = None
- self.plat_name = None
- self.keep_temp = 0
- self.dist_dir = None
- self.skip_build = 0
- self.egg_output = None
- self.exclude_source_files = None
-
-
- def finalize_options(self):
- ei_cmd = self.ei_cmd = self.get_finalized_command("egg_info")
- self.egg_info = ei_cmd.egg_info
-
- if self.bdist_dir is None:
- bdist_base = self.get_finalized_command('bdist').bdist_base
- self.bdist_dir = os.path.join(bdist_base, 'egg')
-
- if self.plat_name is None:
- self.plat_name = get_build_platform()
-
- self.set_undefined_options('bdist',('dist_dir', 'dist_dir'))
-
- if self.egg_output is None:
-
- # Compute filename of the output egg
- basename = Distribution(
- None, None, ei_cmd.egg_name, ei_cmd.egg_version,
- get_python_version(),
- self.distribution.has_ext_modules() and self.plat_name
- ).egg_name()
-
- self.egg_output = os.path.join(self.dist_dir, basename+'.egg')
-
-
-
-
-
-
-
-
- def do_install_data(self):
- # Hack for packages that install data to install's --install-lib
- self.get_finalized_command('install').install_lib = self.bdist_dir
-
- site_packages = os.path.normcase(os.path.realpath(get_python_lib()))
- old, self.distribution.data_files = self.distribution.data_files,[]
-
- for item in old:
- if isinstance(item,tuple) and len(item)==2:
- if os.path.isabs(item[0]):
- realpath = os.path.realpath(item[0])
- normalized = os.path.normcase(realpath)
- if normalized==site_packages or normalized.startswith(
- site_packages+os.sep
- ):
- item = realpath[len(site_packages)+1:], item[1]
- # XXX else: raise ???
- self.distribution.data_files.append(item)
-
- try:
- log.info("installing package data to %s" % self.bdist_dir)
- self.call_command('install_data', force=0, root=None)
- finally:
- self.distribution.data_files = old
-
-
- def get_outputs(self):
- return [self.egg_output]
-
-
- def call_command(self,cmdname,**kw):
- """Invoke reinitialized command `cmdname` with keyword args"""
- for dirname in INSTALL_DIRECTORY_ATTRS:
- kw.setdefault(dirname,self.bdist_dir)
- kw.setdefault('skip_build',self.skip_build)
- kw.setdefault('dry_run', self.dry_run)
- cmd = self.reinitialize_command(cmdname, **kw)
- self.run_command(cmdname)
- return cmd
-
-
- def run(self):
- # Generate metadata first
- self.run_command("egg_info")
-
- # We run install_lib before install_data, because some data hacks
- # pull their data path from the install_lib command.
- log.info("installing library code to %s" % self.bdist_dir)
- instcmd = self.get_finalized_command('install')
- old_root = instcmd.root; instcmd.root = None
- cmd = self.call_command('install_lib', warn_dir=0)
- instcmd.root = old_root
-
- all_outputs, ext_outputs = self.get_ext_outputs()
- self.stubs = []
- to_compile = []
- for (p,ext_name) in enumerate(ext_outputs):
- filename,ext = os.path.splitext(ext_name)
- pyfile = os.path.join(self.bdist_dir, strip_module(filename)+'.py')
- self.stubs.append(pyfile)
- log.info("creating stub loader for %s" % ext_name)
- if not self.dry_run:
- write_stub(os.path.basename(ext_name), pyfile)
- to_compile.append(pyfile)
- ext_outputs[p] = ext_name.replace(os.sep,'/')
-
- to_compile.extend(self.make_init_files())
- if to_compile:
- cmd.byte_compile(to_compile)
-
- if self.distribution.data_files:
- self.do_install_data()
-
- # Make the EGG-INFO directory
- archive_root = self.bdist_dir
- egg_info = os.path.join(archive_root,'EGG-INFO')
- self.mkpath(egg_info)
- if self.distribution.scripts:
- script_dir = os.path.join(egg_info, 'scripts')
- log.info("installing scripts to %s" % script_dir)
- self.call_command('install_scripts',install_dir=script_dir,no_ep=1)
-
- self.copy_metadata_to(egg_info)
- native_libs = os.path.join(egg_info, "native_libs.txt")
- if all_outputs:
- log.info("writing %s" % native_libs)
- if not self.dry_run:
- ensure_directory(native_libs)
- libs_file = open(native_libs, 'wt')
- libs_file.write('\n'.join(all_outputs))
- libs_file.write('\n')
- libs_file.close()
- elif os.path.isfile(native_libs):
- log.info("removing %s" % native_libs)
- if not self.dry_run:
- os.unlink(native_libs)
-
- write_safety_flag(
- os.path.join(archive_root,'EGG-INFO'), self.zip_safe()
- )
-
- if os.path.exists(os.path.join(self.egg_info,'depends.txt')):
- log.warn(
- "WARNING: 'depends.txt' will not be used by setuptools 0.6!\n"
- "Use the install_requires/extras_require setup() args instead."
- )
-
- if self.exclude_source_files:
- self.zap_pyfiles()
-
- # Make the archive
- make_zipfile(self.egg_output, archive_root, verbose=self.verbose,
- dry_run=self.dry_run, mode=self.gen_header())
- if not self.keep_temp:
- remove_tree(self.bdist_dir, dry_run=self.dry_run)
-
- # Add to 'Distribution.dist_files' so that the "upload" command works
- getattr(self.distribution,'dist_files',[]).append(
- ('bdist_egg',get_python_version(),self.egg_output))
-
-
-
-
- def zap_pyfiles(self):
- log.info("Removing .py files from temporary directory")
- for base,dirs,files in walk_egg(self.bdist_dir):
- for name in files:
- if name.endswith('.py'):
- path = os.path.join(base,name)
- log.debug("Deleting %s", path)
- os.unlink(path)
-
- def zip_safe(self):
- safe = getattr(self.distribution,'zip_safe',None)
- if safe is not None:
- return safe
- log.warn("zip_safe flag not set; analyzing archive contents...")
- return analyze_egg(self.bdist_dir, self.stubs)
-
- def make_init_files(self):
- """Create missing package __init__ files"""
- init_files = []
- for base,dirs,files in walk_egg(self.bdist_dir):
- if base==self.bdist_dir:
- # don't put an __init__ in the root
- continue
- for name in files:
- if name.endswith('.py'):
- if '__init__.py' not in files:
- pkg = base[len(self.bdist_dir)+1:].replace(os.sep,'.')
- if self.distribution.has_contents_for(pkg):
- log.warn("Creating missing __init__.py for %s",pkg)
- filename = os.path.join(base,'__init__.py')
- if not self.dry_run:
- f = open(filename,'w'); f.write(NS_PKG_STUB)
- f.close()
- init_files.append(filename)
- break
- else:
- # not a package, don't traverse to subdirectories
- dirs[:] = []
-
- return init_files
-
- def gen_header(self):
- epm = EntryPoint.parse_map(self.distribution.entry_points or '')
- ep = epm.get('setuptools.installation',{}).get('eggsecutable')
- if ep is None:
- return 'w' # not an eggsecutable, do it the usual way.
-
- if not ep.attrs or ep.extras:
- raise DistutilsSetupError(
- "eggsecutable entry point (%r) cannot have 'extras' "
- "or refer to a module" % (ep,)
- )
-
- pyver = sys.version[:3]
- pkg = ep.module_name
- full = '.'.join(ep.attrs)
- base = ep.attrs[0]
- basename = os.path.basename(self.egg_output)
-
- header = (
- "#!/bin/sh\n"
- 'if [ `basename $0` = "%(basename)s" ]\n'
- 'then exec python%(pyver)s -c "'
- "import sys, os; sys.path.insert(0, os.path.abspath('$0')); "
- "from %(pkg)s import %(base)s; sys.exit(%(full)s())"
- '" "$@"\n'
- 'else\n'
- ' echo $0 is not the correct name for this egg file.\n'
- ' echo Please rename it back to %(basename)s and try again.\n'
- ' exec false\n'
- 'fi\n'
-
- ) % locals()
-
- if not self.dry_run:
- mkpath(os.path.dirname(self.egg_output), dry_run=self.dry_run)
- f = open(self.egg_output, 'w')
- f.write(header)
- f.close()
- return 'a'
-
-
- def copy_metadata_to(self, target_dir):
- prefix = os.path.join(self.egg_info,'')
- for path in self.ei_cmd.filelist.files:
- if path.startswith(prefix):
- target = os.path.join(target_dir, path[len(prefix):])
- ensure_directory(target)
- self.copy_file(path, target)
-
- def get_ext_outputs(self):
- """Get a list of relative paths to C extensions in the output distro"""
-
- all_outputs = []
- ext_outputs = []
-
- paths = {self.bdist_dir:''}
- for base, dirs, files in os.walk(self.bdist_dir):
- for filename in files:
- if os.path.splitext(filename)[1].lower() in NATIVE_EXTENSIONS:
- all_outputs.append(paths[base]+filename)
- for filename in dirs:
- paths[os.path.join(base,filename)] = paths[base]+filename+'/'
-
- if self.distribution.has_ext_modules():
- build_cmd = self.get_finalized_command('build_ext')
- for ext in build_cmd.extensions:
- if isinstance(ext,Library):
- continue
- fullname = build_cmd.get_ext_fullname(ext.name)
- filename = build_cmd.get_ext_filename(fullname)
- if not os.path.basename(filename).startswith('dl-'):
- if os.path.exists(os.path.join(self.bdist_dir,filename)):
- ext_outputs.append(filename)
-
- return all_outputs, ext_outputs
-
-
-NATIVE_EXTENSIONS = dict.fromkeys('.dll .so .dylib .pyd'.split())
-
-
-
-
-def walk_egg(egg_dir):
- """Walk an unpacked egg's contents, skipping the metadata directory"""
- walker = os.walk(egg_dir)
- base,dirs,files = walker.next()
- if 'EGG-INFO' in dirs:
- dirs.remove('EGG-INFO')
- yield base,dirs,files
- for bdf in walker:
- yield bdf
-
-def analyze_egg(egg_dir, stubs):
- # check for existing flag in EGG-INFO
- for flag,fn in safety_flags.items():
- if os.path.exists(os.path.join(egg_dir,'EGG-INFO',fn)):
- return flag
- if not can_scan(): return False
- safe = True
- for base, dirs, files in walk_egg(egg_dir):
- for name in files:
- if name.endswith('.py') or name.endswith('.pyw'):
- continue
- elif name.endswith('.pyc') or name.endswith('.pyo'):
- # always scan, even if we already know we're not safe
- safe = scan_module(egg_dir, base, name, stubs) and safe
- return safe
-
-def write_safety_flag(egg_dir, safe):
- # Write or remove zip safety flag file(s)
- for flag,fn in safety_flags.items():
- fn = os.path.join(egg_dir, fn)
- if os.path.exists(fn):
- if safe is None or bool(safe)<>flag:
- os.unlink(fn)
- elif safe is not None and bool(safe)==flag:
- f=open(fn,'wt'); f.write('\n'); f.close()
-
-safety_flags = {
- True: 'zip-safe',
- False: 'not-zip-safe',
-}
-
-def scan_module(egg_dir, base, name, stubs):
- """Check whether module possibly uses unsafe-for-zipfile stuff"""
-
- filename = os.path.join(base,name)
- if filename[:-1] in stubs:
- return True # Extension module
- pkg = base[len(egg_dir)+1:].replace(os.sep,'.')
- module = pkg+(pkg and '.' or '')+os.path.splitext(name)[0]
- f = open(filename,'rb'); f.read(8) # skip magic & date
- code = marshal.load(f); f.close()
- safe = True
- symbols = dict.fromkeys(iter_symbols(code))
- for bad in ['__file__', '__path__']:
- if bad in symbols:
- log.warn("%s: module references %s", module, bad)
- safe = False
- if 'inspect' in symbols:
- for bad in [
- 'getsource', 'getabsfile', 'getsourcefile', 'getfile',
- 'getsourcelines', 'findsource', 'getcomments', 'getframeinfo',
- 'getinnerframes', 'getouterframes', 'stack', 'trace'
- ]:
- if bad in symbols:
- log.warn("%s: module MAY be using inspect.%s", module, bad)
- safe = False
- if '__name__' in symbols and '__main__' in symbols and '.' not in module:
- if sys.version[:3]=="2.4": # -m works w/zipfiles in 2.5
- log.warn("%s: top-level module may be 'python -m' script", module)
- safe = False
- return safe
-
-def iter_symbols(code):
- """Yield names and strings used by `code` and its nested code objects"""
- for name in code.co_names: yield name
- for const in code.co_consts:
- if isinstance(const,basestring):
- yield const
- elif isinstance(const,CodeType):
- for name in iter_symbols(const):
- yield name
-
-def can_scan():
- if not sys.platform.startswith('java') and sys.platform != 'cli':
- # CPython, PyPy, etc.
- return True
- log.warn("Unable to analyze compiled code on this platform.")
- log.warn("Please ask the author to include a 'zip_safe'"
- " setting (either True or False) in the package's setup.py")
-
-
-# Attribute names of options for commands that might need to be convinced to
-# install to the egg build directory
-
-INSTALL_DIRECTORY_ATTRS = [
- 'install_lib', 'install_dir', 'install_data', 'install_base'
-]
-
-def make_zipfile(zip_filename, base_dir, verbose=0, dry_run=0, compress=None,
- mode='w'
-):
- """Create a zip file from all the files under 'base_dir'. The output
- zip file will be named 'base_dir' + ".zip". Uses either the "zipfile"
- Python module (if available) or the InfoZIP "zip" utility (if installed
- and found on the default search path). If neither tool is available,
- raises DistutilsExecError. Returns the name of the output zip file.
- """
- import zipfile
- mkpath(os.path.dirname(zip_filename), dry_run=dry_run)
- log.info("creating '%s' and adding '%s' to it", zip_filename, base_dir)
-
- def visit(z, dirname, names):
- for name in names:
- path = os.path.normpath(os.path.join(dirname, name))
- if os.path.isfile(path):
- p = path[len(base_dir)+1:]
- if not dry_run:
- z.write(path, p)
- log.debug("adding '%s'" % p)
-
- if compress is None:
- compress = (sys.version>="2.4") # avoid 2.3 zipimport bug when 64 bits
-
- compression = [zipfile.ZIP_STORED, zipfile.ZIP_DEFLATED][bool(compress)]
- if not dry_run:
- z = zipfile.ZipFile(zip_filename, mode, compression=compression)
- for dirname, dirs, files in os.walk(base_dir):
- visit(z, dirname, files)
- z.close()
- else:
- for dirname, dirs, files in os.walk(base_dir):
- visit(None, dirname, files) # 'files', not the builtin 'file'
- return zip_filename
-#
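A call sketch (paths hypothetical): compression is chosen automatically when 'compress' is None, and dry_run walks the tree without writing anything:

    make_zipfile('dist/foo-1.0-py2.7.egg', 'build/bdist/egg', verbose=1)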
diff --git a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/bdist_rpm.py b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/bdist_rpm.py
deleted file mode 100755
index 8c48da35..00000000
--- a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/bdist_rpm.py
+++ /dev/null
@@ -1,82 +0,0 @@
-# This is just a kludge so that bdist_rpm doesn't guess wrong about the
-# distribution name and version if the egg_info command is going to alter
-# them; another kludge to allow building old-style non-egg RPMs; and,
-# finally, a kludge to track .rpm files for uploading when run on Python <2.5.
-
-from distutils.command.bdist_rpm import bdist_rpm as _bdist_rpm
-import sys, os
-
-class bdist_rpm(_bdist_rpm):
-
- def initialize_options(self):
- _bdist_rpm.initialize_options(self)
- self.no_egg = None
-
- if sys.version<"2.5":
- # Track for uploading any .rpm file(s) moved to self.dist_dir
- def move_file(self, src, dst, level=1):
- _bdist_rpm.move_file(self, src, dst, level)
- if dst==self.dist_dir and src.endswith('.rpm'):
- getattr(self.distribution,'dist_files',[]).append(
- ('bdist_rpm',
- src.endswith('.src.rpm') and 'any' or sys.version[:3],
- os.path.join(dst, os.path.basename(src)))
- )
-
- def run(self):
- self.run_command('egg_info') # ensure distro name is up-to-date
- _bdist_rpm.run(self)
-
-
- def _make_spec_file(self):
- version = self.distribution.get_version()
- rpmversion = version.replace('-','_')
- spec = _bdist_rpm._make_spec_file(self)
- line23 = '%define version '+version
- line24 = '%define version '+rpmversion
- spec = [
- line.replace(
- "Source0: %{name}-%{version}.tar",
- "Source0: %{name}-%{unmangled_version}.tar"
- ).replace(
- "setup.py install ",
- "setup.py install --single-version-externally-managed "
- ).replace(
- "%setup",
- "%setup -n %{name}-%{unmangled_version}"
- ).replace(line23,line24)
- for line in spec
- ]
- spec.insert(spec.index(line24)+1, "%define unmangled_version "+version)
- return spec
-
-
-
diff --git a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/bdist_wininst.py b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/bdist_wininst.py
deleted file mode 100755
index 93e6846d..00000000
--- a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/bdist_wininst.py
+++ /dev/null
@@ -1,41 +0,0 @@
-from distutils.command.bdist_wininst import bdist_wininst as _bdist_wininst
-import os, sys
-
-class bdist_wininst(_bdist_wininst):
-
- def create_exe(self, arcname, fullname, bitmap=None):
- _bdist_wininst.create_exe(self, arcname, fullname, bitmap)
- dist_files = getattr(self.distribution, 'dist_files', [])
-
- if self.target_version:
- installer_name = os.path.join(self.dist_dir,
- "%s.win32-py%s.exe" %
- (fullname, self.target_version))
- pyversion = self.target_version
-
- # fix 2.5 bdist_wininst ignoring --target-version spec
- bad = ('bdist_wininst','any',installer_name)
- if bad in dist_files:
- dist_files.remove(bad)
- else:
- installer_name = os.path.join(self.dist_dir,
- "%s.win32.exe" % fullname)
- pyversion = 'any'
- good = ('bdist_wininst', pyversion, installer_name)
- if good not in dist_files:
- dist_files.append(good)
-
- def reinitialize_command (self, command, reinit_subcommands=0):
- cmd = self.distribution.reinitialize_command(
- command, reinit_subcommands)
- if command in ('install', 'install_lib'):
- cmd.install_lib = None # work around distutils bug
- return cmd
-
- def run(self):
- self._is_running = True
- try:
- _bdist_wininst.run(self)
- finally:
- self._is_running = False
-
diff --git a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/build_ext.py b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/build_ext.py
deleted file mode 100755
index 4a94572c..00000000
--- a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/build_ext.py
+++ /dev/null
@@ -1,294 +0,0 @@
-from distutils.command.build_ext import build_ext as _du_build_ext
-try:
- # Attempt to use Pyrex for building extensions, if available
- from Pyrex.Distutils.build_ext import build_ext as _build_ext
-except ImportError:
- _build_ext = _du_build_ext
-
-import os, sys
-from distutils.file_util import copy_file
-from setuptools.extension import Library
-from distutils.ccompiler import new_compiler
-from distutils.sysconfig import customize_compiler, get_config_var
-get_config_var("LDSHARED") # make sure _config_vars is initialized
-from distutils.sysconfig import _config_vars
-from distutils import log
-from distutils.errors import *
-
-have_rtld = False
-use_stubs = False
-libtype = 'shared'
-
-if sys.platform == "darwin":
- use_stubs = True
-elif os.name != 'nt':
- try:
- from dl import RTLD_NOW
- have_rtld = True
- use_stubs = True
- except ImportError:
- pass
-
-def if_dl(s):
- if have_rtld:
- return s
- return ''
-
-
-
-
-
-
-class build_ext(_build_ext):
- def run(self):
- """Build extensions in build directory, then copy if --inplace"""
- old_inplace, self.inplace = self.inplace, 0
- _build_ext.run(self)
- self.inplace = old_inplace
- if old_inplace:
- self.copy_extensions_to_source()
-
- def copy_extensions_to_source(self):
- build_py = self.get_finalized_command('build_py')
- for ext in self.extensions:
- fullname = self.get_ext_fullname(ext.name)
- filename = self.get_ext_filename(fullname)
- modpath = fullname.split('.')
- package = '.'.join(modpath[:-1])
- package_dir = build_py.get_package_dir(package)
- dest_filename = os.path.join(package_dir,os.path.basename(filename))
- src_filename = os.path.join(self.build_lib,filename)
-
- # Always copy, even if source is older than destination, to ensure
- # that the right extensions for the current Python/platform are
- # used.
- copy_file(
- src_filename, dest_filename, verbose=self.verbose,
- dry_run=self.dry_run
- )
- if ext._needs_stub:
- self.write_stub(package_dir or os.curdir, ext, True)
-
-
- if _build_ext is not _du_build_ext and not hasattr(_build_ext,'pyrex_sources'):
- # Workaround for problems using some Pyrex versions w/SWIG and/or 2.4
- def swig_sources(self, sources, *otherargs):
- # first do any Pyrex processing
- sources = _build_ext.swig_sources(self, sources) or sources
- # Then do any actual SWIG stuff on the remainder
- return _du_build_ext.swig_sources(self, sources, *otherargs)
-
-
-
- def get_ext_filename(self, fullname):
- filename = _build_ext.get_ext_filename(self,fullname)
- if fullname not in self.ext_map:
- return filename
- ext = self.ext_map[fullname]
- if isinstance(ext,Library):
- fn, ext = os.path.splitext(filename)
- return self.shlib_compiler.library_filename(fn,libtype)
- elif use_stubs and ext._links_to_dynamic:
- d,fn = os.path.split(filename)
- return os.path.join(d,'dl-'+fn)
- else:
- return filename
-
- def initialize_options(self):
- _build_ext.initialize_options(self)
- self.shlib_compiler = None
- self.shlibs = []
- self.ext_map = {}
-
- def finalize_options(self):
- _build_ext.finalize_options(self)
- self.extensions = self.extensions or []
- self.check_extensions_list(self.extensions)
- self.shlibs = [ext for ext in self.extensions
- if isinstance(ext,Library)]
- if self.shlibs:
- self.setup_shlib_compiler()
- for ext in self.extensions:
- ext._full_name = self.get_ext_fullname(ext.name)
- for ext in self.extensions:
- fullname = ext._full_name
- self.ext_map[fullname] = ext
-
- # distutils 3.1 will also ask for module names
- # XXX what to do with conflicts?
- self.ext_map[fullname.split('.')[-1]] = ext
-
- ltd = ext._links_to_dynamic = \
- self.shlibs and self.links_to_dynamic(ext) or False
- ext._needs_stub = ltd and use_stubs and not isinstance(ext,Library)
- filename = ext._file_name = self.get_ext_filename(fullname)
- libdir = os.path.dirname(os.path.join(self.build_lib,filename))
- if ltd and libdir not in ext.library_dirs:
- ext.library_dirs.append(libdir)
- if ltd and use_stubs and os.curdir not in ext.runtime_library_dirs:
- ext.runtime_library_dirs.append(os.curdir)
-
- def setup_shlib_compiler(self):
- compiler = self.shlib_compiler = new_compiler(
- compiler=self.compiler, dry_run=self.dry_run, force=self.force
- )
- if sys.platform == "darwin":
- tmp = _config_vars.copy()
- try:
- # XXX Help! I don't have any idea whether these are right...
- _config_vars['LDSHARED'] = "gcc -Wl,-x -dynamiclib -undefined dynamic_lookup"
- _config_vars['CCSHARED'] = " -dynamiclib"
- _config_vars['SO'] = ".dylib"
- customize_compiler(compiler)
- finally:
- _config_vars.clear()
- _config_vars.update(tmp)
- else:
- customize_compiler(compiler)
-
- if self.include_dirs is not None:
- compiler.set_include_dirs(self.include_dirs)
- if self.define is not None:
- # 'define' option is a list of (name,value) tuples
- for (name,value) in self.define:
- compiler.define_macro(name, value)
- if self.undef is not None:
- for macro in self.undef:
- compiler.undefine_macro(macro)
- if self.libraries is not None:
- compiler.set_libraries(self.libraries)
- if self.library_dirs is not None:
- compiler.set_library_dirs(self.library_dirs)
- if self.rpath is not None:
- compiler.set_runtime_library_dirs(self.rpath)
- if self.link_objects is not None:
- compiler.set_link_objects(self.link_objects)
-
- # hack so distutils' build_extension() builds a library instead
- compiler.link_shared_object = link_shared_object.__get__(compiler)
-
-
-
- def get_export_symbols(self, ext):
- if isinstance(ext,Library):
- return ext.export_symbols
- return _build_ext.get_export_symbols(self,ext)
-
- def build_extension(self, ext):
- _compiler = self.compiler
- try:
- if isinstance(ext,Library):
- self.compiler = self.shlib_compiler
- _build_ext.build_extension(self,ext)
- if ext._needs_stub:
- self.write_stub(
- self.get_finalized_command('build_py').build_lib, ext
- )
- finally:
- self.compiler = _compiler
-
- def links_to_dynamic(self, ext):
- """Return true if 'ext' links to a dynamic lib in the same package"""
- # XXX this should check to ensure the lib is actually being built
- # XXX as dynamic, and not just using a locally-found version or a
- # XXX static-compiled version
- libnames = dict.fromkeys([lib._full_name for lib in self.shlibs])
- pkg = '.'.join(ext._full_name.split('.')[:-1]+[''])
- for libname in ext.libraries:
- if pkg+libname in libnames: return True
- return False
-
- def get_outputs(self):
- outputs = _build_ext.get_outputs(self)
- optimize = self.get_finalized_command('build_py').optimize
- for ext in self.extensions:
- if ext._needs_stub:
- base = os.path.join(self.build_lib, *ext._full_name.split('.'))
- outputs.append(base+'.py')
- outputs.append(base+'.pyc')
- if optimize:
- outputs.append(base+'.pyo')
- return outputs
-
- def write_stub(self, output_dir, ext, compile=False):
- log.info("writing stub loader for %s to %s",ext._full_name, output_dir)
- stub_file = os.path.join(output_dir, *ext._full_name.split('.'))+'.py'
- if compile and os.path.exists(stub_file):
- raise DistutilsError(stub_file+" already exists! Please delete.")
- if not self.dry_run:
- f = open(stub_file,'w')
- f.write('\n'.join([
- "def __bootstrap__():",
- " global __bootstrap__, __file__, __loader__",
- " import sys, os, pkg_resources, imp"+if_dl(", dl"),
- " __file__ = pkg_resources.resource_filename(__name__,%r)"
- % os.path.basename(ext._file_name),
- " del __bootstrap__",
- " if '__loader__' in globals():",
- " del __loader__",
- if_dl(" old_flags = sys.getdlopenflags()"),
- " old_dir = os.getcwd()",
- " try:",
- " os.chdir(os.path.dirname(__file__))",
- if_dl(" sys.setdlopenflags(dl.RTLD_NOW)"),
- " imp.load_dynamic(__name__,__file__)",
- " finally:",
- if_dl(" sys.setdlopenflags(old_flags)"),
- " os.chdir(old_dir)",
- "__bootstrap__()",
- "" # terminal \n
- ]))
- f.close()
- if compile:
- from distutils.util import byte_compile
- byte_compile([stub_file], optimize=0,
- force=True, dry_run=self.dry_run)
- optimize = self.get_finalized_command('install_lib').optimize
- if optimize > 0:
- byte_compile([stub_file], optimize=optimize,
- force=True, dry_run=self.dry_run)
- if os.path.exists(stub_file) and not self.dry_run:
- os.unlink(stub_file)
-
-
-if use_stubs or os.name=='nt':
- # Build shared libraries
- #
- def link_shared_object(self, objects, output_libname, output_dir=None,
- libraries=None, library_dirs=None, runtime_library_dirs=None,
- export_symbols=None, debug=0, extra_preargs=None,
- extra_postargs=None, build_temp=None, target_lang=None
- ): self.link(
- self.SHARED_LIBRARY, objects, output_libname,
- output_dir, libraries, library_dirs, runtime_library_dirs,
- export_symbols, debug, extra_preargs, extra_postargs,
- build_temp, target_lang
- )
-else:
- # Build static libraries everywhere else
- libtype = 'static'
-
- def link_shared_object(self, objects, output_libname, output_dir=None,
- libraries=None, library_dirs=None, runtime_library_dirs=None,
- export_symbols=None, debug=0, extra_preargs=None,
- extra_postargs=None, build_temp=None, target_lang=None
- ):
- # XXX we need to either disallow these attrs on Library instances,
- # or warn/abort here if set, or something...
- #libraries=None, library_dirs=None, runtime_library_dirs=None,
- #export_symbols=None, extra_preargs=None, extra_postargs=None,
- #build_temp=None
-
- assert output_dir is None # distutils build_ext doesn't pass this
- output_dir,filename = os.path.split(output_libname)
- basename, ext = os.path.splitext(filename)
- if self.library_filename("x").startswith('lib'):
- # strip 'lib' prefix; this is kludgy if some platform uses
- # a different prefix
- basename = basename[3:]
-
- self.create_static_lib(
- objects, basename, output_dir, debug, target_lang
- )
-
-
diff --git a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/build_py.py b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/build_py.py
deleted file mode 100755
index a01e2843..00000000
--- a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/build_py.py
+++ /dev/null
@@ -1,268 +0,0 @@
-import os.path, sys, fnmatch
-from distutils.command.build_py import build_py as _build_py
-from distutils.util import convert_path
-from glob import glob
-
-try:
- from distutils.util import Mixin2to3 as _Mixin2to3
- # add support for converting doctests that is missing in 3.1 distutils
- from distutils import log
- from lib2to3.refactor import RefactoringTool, get_fixers_from_package
- import setuptools
- class DistutilsRefactoringTool(RefactoringTool):
- def log_error(self, msg, *args, **kw):
- log.error(msg, *args)
-
- def log_message(self, msg, *args):
- log.info(msg, *args)
-
- def log_debug(self, msg, *args):
- log.debug(msg, *args)
-
- class Mixin2to3(_Mixin2to3):
- def run_2to3(self, files, doctests = False):
- # See if the distribution option has been set, otherwise check the
- # setuptools default.
- if self.distribution.use_2to3 is not True:
- return
- if not files:
- return
- log.info("Fixing "+" ".join(files))
- if not self.fixer_names:
- self.fixer_names = []
- for p in setuptools.lib2to3_fixer_packages:
- self.fixer_names.extend(get_fixers_from_package(p))
- if self.distribution.use_2to3_fixers is not None:
- for p in self.distribution.use_2to3_fixers:
- self.fixer_names.extend(get_fixers_from_package(p))
- if doctests:
- if setuptools.run_2to3_on_doctests:
- r = DistutilsRefactoringTool(self.fixer_names)
- r.refactor(files, write=True, doctests_only=True)
- else:
- _Mixin2to3.run_2to3(self, files)
-
-except ImportError:
- class Mixin2to3:
- def run_2to3(self, files, doctests=True):
- # Nothing done in 2.x
- pass
-
-class build_py(_build_py, Mixin2to3):
- """Enhanced 'build_py' command that includes data files with packages
-
- The data files are specified via a 'package_data' argument to 'setup()'.
- See 'setuptools.dist.Distribution' for more details.
-
- Also, this version of the 'build_py' command allows you to specify both
- 'py_modules' and 'packages' in the same setup operation.
- """
- def finalize_options(self):
- _build_py.finalize_options(self)
- self.package_data = self.distribution.package_data
- self.exclude_package_data = self.distribution.exclude_package_data or {}
- if 'data_files' in self.__dict__: del self.__dict__['data_files']
- self.__updated_files = []
- self.__doctests_2to3 = []
-
- def run(self):
- """Build modules, packages, and copy data files to build directory"""
- if not self.py_modules and not self.packages:
- return
-
- if self.py_modules:
- self.build_modules()
-
- if self.packages:
- self.build_packages()
- self.build_package_data()
-
- self.run_2to3(self.__updated_files, False)
- self.run_2to3(self.__updated_files, True)
- self.run_2to3(self.__doctests_2to3, True)
-
- # Only compile actual .py files, using our base class' idea of what our
- # output files are.
- self.byte_compile(_build_py.get_outputs(self, include_bytecode=0))
-
- def __getattr__(self,attr):
- if attr=='data_files': # lazily compute data files
- self.data_files = files = self._get_data_files(); return files
- return _build_py.__getattr__(self,attr)
-
- def build_module(self, module, module_file, package):
- outfile, copied = _build_py.build_module(self, module, module_file, package)
- if copied:
- self.__updated_files.append(outfile)
- return outfile, copied
-
- def _get_data_files(self):
- """Generate list of '(package,src_dir,build_dir,filenames)' tuples"""
- self.analyze_manifest()
- data = []
- for package in self.packages or ():
- # Locate package source directory
- src_dir = self.get_package_dir(package)
-
- # Compute package build directory
- build_dir = os.path.join(*([self.build_lib] + package.split('.')))
-
- # Length of path to strip from found files
- plen = len(src_dir)+1
-
- # Strip directory from globbed filenames
- filenames = [
- file[plen:] for file in self.find_data_files(package, src_dir)
- ]
- data.append( (package, src_dir, build_dir, filenames) )
- return data
-
- def find_data_files(self, package, src_dir):
- """Return filenames for package's data files in 'src_dir'"""
- globs = (self.package_data.get('', [])
- + self.package_data.get(package, []))
- files = self.manifest_files.get(package, [])[:]
- for pattern in globs:
- # Each pattern has to be converted to a platform-specific path
- files.extend(glob(os.path.join(src_dir, convert_path(pattern))))
- return self.exclude_data_files(package, src_dir, files)
-
- def build_package_data(self):
- """Copy data files into build directory"""
- lastdir = None
- for package, src_dir, build_dir, filenames in self.data_files:
- for filename in filenames:
- target = os.path.join(build_dir, filename)
- self.mkpath(os.path.dirname(target))
- srcfile = os.path.join(src_dir, filename)
- outf, copied = self.copy_file(srcfile, target)
- srcfile = os.path.abspath(srcfile)
- if copied and srcfile in self.distribution.convert_2to3_doctests:
- self.__doctests_2to3.append(outf)
-
-
- def analyze_manifest(self):
- self.manifest_files = mf = {}
- if not self.distribution.include_package_data:
- return
- src_dirs = {}
- for package in self.packages or ():
- # Locate package source directory
- src_dirs[assert_relative(self.get_package_dir(package))] = package
-
- self.run_command('egg_info')
- ei_cmd = self.get_finalized_command('egg_info')
- for path in ei_cmd.filelist.files:
- d,f = os.path.split(assert_relative(path))
- prev = None
- oldf = f
- while d and d!=prev and d not in src_dirs:
- prev = d
- d, df = os.path.split(d)
- f = os.path.join(df, f)
- if d in src_dirs:
- if path.endswith('.py') and f==oldf:
- continue # it's a module, not data
- mf.setdefault(src_dirs[d],[]).append(path)
-
- def get_data_files(self): pass # kludge 2.4 for lazy computation
-
- if sys.version<"2.4": # Python 2.4 already has this code
- def get_outputs(self, include_bytecode=1):
- """Return complete list of files copied to the build directory
-
- This includes both '.py' files and data files, as well as '.pyc'
- and '.pyo' files if 'include_bytecode' is true. (This method is
- needed for the 'install_lib' command to do its job properly, and to
- generate a correct installation manifest.)
- """
- return _build_py.get_outputs(self, include_bytecode) + [
- os.path.join(build_dir, filename)
- for package, src_dir, build_dir,filenames in self.data_files
- for filename in filenames
- ]
-
- def check_package(self, package, package_dir):
- """Check namespace packages' __init__ for declare_namespace"""
- try:
- return self.packages_checked[package]
- except KeyError:
- pass
-
- init_py = _build_py.check_package(self, package, package_dir)
- self.packages_checked[package] = init_py
-
- if not init_py or not self.distribution.namespace_packages:
- return init_py
-
- for pkg in self.distribution.namespace_packages:
- if pkg==package or pkg.startswith(package+'.'):
- break
- else:
- return init_py
-
- f = open(init_py,'rU')
- if 'declare_namespace' not in f.read():
- from distutils import log
- log.warn(
- "WARNING: %s is a namespace package, but its __init__.py does\n"
- "not declare_namespace(); setuptools 0.7 will REQUIRE this!\n"
- '(See the setuptools manual under "Namespace Packages" for '
- "details.)\n", package
- )
- f.close()
- return init_py
-
- def initialize_options(self):
- self.packages_checked={}
- _build_py.initialize_options(self)
-
-
- def get_package_dir(self, package):
- res = _build_py.get_package_dir(self, package)
- if self.distribution.src_root is not None:
- return os.path.join(self.distribution.src_root, res)
- return res
-
-
- def exclude_data_files(self, package, src_dir, files):
- """Filter filenames for package's data files in 'src_dir'"""
- globs = (self.exclude_package_data.get('', [])
- + self.exclude_package_data.get(package, []))
- bad = []
- for pattern in globs:
- bad.extend(
- fnmatch.filter(
- files, os.path.join(src_dir, convert_path(pattern))
- )
- )
- bad = dict.fromkeys(bad)
- seen = {}
- return [
- f for f in files if f not in bad
- and f not in seen and seen.setdefault(f,1) # ditch dupes
- ]
-
-
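A setup() sketch of the knobs this command reads (names hypothetical):

    setup(
        name='foo',
        packages=['foo'],
        package_data={'': ['*.txt'], 'foo': ['data/*.dat']},
        exclude_package_data={'foo': ['data/private.dat']},
    )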
-def assert_relative(path):
- if not os.path.isabs(path):
- return path
- from distutils.errors import DistutilsSetupError
- raise DistutilsSetupError(
-"""Error: setup script specifies an absolute path:
-
- %s
-
-setup() arguments must *always* be /-separated paths relative to the
-setup.py directory, *never* absolute paths.
-""" % path
- )
-
-
diff --git a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/develop.py b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/develop.py
deleted file mode 100755
index 93b7773c..00000000
--- a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/develop.py
+++ /dev/null
@@ -1,141 +0,0 @@
-from setuptools.command.easy_install import easy_install
-from distutils.util import convert_path, subst_vars
-from pkg_resources import Distribution, PathMetadata, normalize_path
-from distutils import log
-from distutils.errors import DistutilsError, DistutilsOptionError
-import os, setuptools, glob
-
-class develop(easy_install):
- """Set up package for development"""
-
- description = "install package in 'development mode'"
-
- user_options = easy_install.user_options + [
- ("uninstall", "u", "Uninstall this source package"),
- ("egg-path=", None, "Set the path to be used in the .egg-link file"),
- ]
-
- boolean_options = easy_install.boolean_options + ['uninstall']
-
- command_consumes_arguments = False # override base
-
- def run(self):
- if self.uninstall:
- self.multi_version = True
- self.uninstall_link()
- else:
- self.install_for_development()
- self.warn_deprecated_options()
-
- def initialize_options(self):
- self.uninstall = None
- self.egg_path = None
- easy_install.initialize_options(self)
- self.setup_path = None
- self.always_copy_from = '.' # always copy eggs installed in curdir
-
-
-
- def finalize_options(self):
- ei = self.get_finalized_command("egg_info")
- if ei.broken_egg_info:
- raise DistutilsError(
- "Please rename %r to %r before using 'develop'"
- % (ei.egg_info, ei.broken_egg_info)
- )
- self.args = [ei.egg_name]
-
-
-
-
- easy_install.finalize_options(self)
- self.expand_basedirs()
- self.expand_dirs()
- # pick up setup-dir .egg files only: no .egg-info
- self.package_index.scan(glob.glob('*.egg'))
-
- self.egg_link = os.path.join(self.install_dir, ei.egg_name+'.egg-link')
- self.egg_base = ei.egg_base
- if self.egg_path is None:
- self.egg_path = os.path.abspath(ei.egg_base)
-
- target = normalize_path(self.egg_base)
- if normalize_path(os.path.join(self.install_dir, self.egg_path)) != target:
- raise DistutilsOptionError(
- "--egg-path must be a relative path from the install"
- " directory to "+target
- )
-
- # Make a distribution for the package's source
- self.dist = Distribution(
- target,
- PathMetadata(target, os.path.abspath(ei.egg_info)),
- project_name = ei.egg_name
- )
-
- p = self.egg_base.replace(os.sep,'/')
- if p!= os.curdir:
- p = '../' * (p.count('/')+1)
- self.setup_path = p
- p = normalize_path(os.path.join(self.install_dir, self.egg_path, p))
- if p != normalize_path(os.curdir):
- raise DistutilsOptionError(
- "Can't get a consistent path to setup script from"
- " installation directory", p, normalize_path(os.curdir))
-
- def install_for_development(self):
- # Ensure metadata is up-to-date
- self.run_command('egg_info')
- # Build extensions in-place
- self.reinitialize_command('build_ext', inplace=1)
- self.run_command('build_ext')
- self.install_site_py() # ensure that target dir is site-safe
- if setuptools.bootstrap_install_from:
- self.easy_install(setuptools.bootstrap_install_from)
- setuptools.bootstrap_install_from = None
-
- # create an .egg-link in the installation dir, pointing to our egg
- log.info("Creating %s (link to %s)", self.egg_link, self.egg_base)
- if not self.dry_run:
- f = open(self.egg_link,"w")
- f.write(self.egg_path + "\n" + self.setup_path)
- f.close()
- # postprocess the installed distro, fixing up .pth, installing scripts,
- # and handling requirements
- self.process_distribution(None, self.dist, not self.no_deps)
-
-
- def uninstall_link(self):
- if os.path.exists(self.egg_link):
- log.info("Removing %s (link to %s)", self.egg_link, self.egg_base)
- contents = [line.rstrip() for line in open(self.egg_link)]
- if contents not in ([self.egg_path], [self.egg_path, self.setup_path]):
- log.warn("Link points to %s: uninstall aborted", contents)
- return
- if not self.dry_run:
- os.unlink(self.egg_link)
- if not self.dry_run:
- self.update_pth(self.dist) # remove any .pth link to us
- if self.distribution.scripts:
- # XXX should also check for entry point scripts!
- log.warn("Note: you must uninstall or replace scripts manually!")
-
- def install_egg_scripts(self, dist):
- if dist is not self.dist:
- # Installing a dependency, so fall back to normal behavior
- return easy_install.install_egg_scripts(self,dist)
-
- # create wrapper scripts in the script dir, pointing to dist.scripts
-
- # new-style...
- self.install_wrapper_scripts(dist)
-
- # ...and old-style
- for script_name in self.distribution.scripts or []:
- script_path = os.path.abspath(convert_path(script_name))
- script_name = os.path.basename(script_path)
- f = open(script_path,'rU')
- script_text = f.read()
- f.close()
- self.install_script(dist, script_name, script_text, script_path)
-
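# A minimal sketch of the .egg-link format that install_for_development()
# writes above: plain text, the project's egg base directory on the first
# line and the relative path back to the setup.py directory on the second.
# Paths here are illustrative, not from this repository.
import os

def write_egg_link(install_dir, egg_name, egg_path, setup_path):
    link = os.path.join(install_dir, egg_name + '.egg-link')
    f = open(link, 'w')
    f.write(egg_path + "\n" + setup_path)  # e.g. "/src/demo" then "."
    f.close()
    return link

# write_egg_link('/tmp/site-packages', 'demo', '/src/demo', '.') creates
# demo.egg-link; uninstall_link() later verifies exactly these two lines.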
diff --git a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/easy_install.py b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/easy_install.py
deleted file mode 100755
index 27fd00c7..00000000
--- a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/easy_install.py
+++ /dev/null
@@ -1,1865 +0,0 @@
-#!python
-"""\
-Easy Install
-------------
-
-A tool for doing automatic download/extract/build of distutils-based Python
-packages. For detailed documentation, see the accompanying EasyInstall.txt
-file, or visit the `EasyInstall home page`__.
-
-__ http://packages.python.org/distribute/easy_install.html
-
-"""
-import sys, os.path, zipimport, shutil, tempfile, zipfile, re, stat, random
-from glob import glob
-from setuptools import Command, _dont_write_bytecode
-from setuptools.sandbox import run_setup
-from distutils import log, dir_util
-from distutils.util import convert_path, subst_vars
-from distutils.sysconfig import get_python_lib, get_config_vars
-from distutils.errors import DistutilsArgError, DistutilsOptionError, \
- DistutilsError, DistutilsPlatformError
-from distutils.command.install import INSTALL_SCHEMES, SCHEME_KEYS
-from setuptools.archive_util import unpack_archive
-from setuptools.package_index import PackageIndex
-from setuptools.package_index import URL_SCHEME
-from setuptools.command import bdist_egg, egg_info
-from pkg_resources import yield_lines, normalize_path, resource_string, \
- ensure_directory, get_distribution, find_distributions, \
- Environment, Requirement, Distribution, \
- PathMetadata, EggMetadata, WorkingSet, \
- DistributionNotFound, VersionConflict, \
- DEVELOP_DIST
-
-sys_executable = os.path.normpath(sys.executable)
-
-__all__ = [
- 'samefile', 'easy_install', 'PthDistributions', 'extract_wininst_cfg',
- 'main', 'get_exe_prefixes',
-]
-
-import site
-HAS_USER_SITE = not sys.version < "2.6" and site.ENABLE_USER_SITE
-
-def samefile(p1,p2):
- if hasattr(os.path,'samefile') and (
- os.path.exists(p1) and os.path.exists(p2)
- ):
- return os.path.samefile(p1,p2)
- return (
- os.path.normpath(os.path.normcase(p1)) ==
- os.path.normpath(os.path.normcase(p2))
- )
-
-if sys.version_info <= (3,):
- def _to_ascii(s):
- return s
- def isascii(s):
- try:
- unicode(s, 'ascii')
- return True
- except UnicodeError:
- return False
-else:
- def _to_ascii(s):
- return s.encode('ascii')
- def isascii(s):
- try:
- s.encode('ascii')
- return True
- except UnicodeError:
- return False
-
-class easy_install(Command):
- """Manage a download/build/install process"""
- description = "Find/get/install Python packages"
- command_consumes_arguments = True
-
- user_options = [
- ('prefix=', None, "installation prefix"),
- ("zip-ok", "z", "install package as a zipfile"),
- ("multi-version", "m", "make apps have to require() a version"),
- ("upgrade", "U", "force upgrade (searches PyPI for latest versions)"),
- ("install-dir=", "d", "install package to DIR"),
- ("script-dir=", "s", "install scripts to DIR"),
- ("exclude-scripts", "x", "Don't install scripts"),
- ("always-copy", "a", "Copy all needed packages to install dir"),
- ("index-url=", "i", "base URL of Python Package Index"),
- ("find-links=", "f", "additional URL(s) to search for packages"),
- ("delete-conflicting", "D", "no longer needed; don't use this"),
- ("ignore-conflicts-at-my-risk", None,
- "no longer needed; don't use this"),
- ("build-directory=", "b",
- "download/extract/build in DIR; keep the results"),
- ('optimize=', 'O',
- "also compile with optimization: -O1 for \"python -O\", "
- "-O2 for \"python -OO\", and -O0 to disable [default: -O0]"),
- ('record=', None,
- "filename in which to record list of installed files"),
- ('always-unzip', 'Z', "don't install as a zipfile, no matter what"),
- ('site-dirs=','S',"list of directories where .pth files work"),
- ('editable', 'e', "Install specified packages in editable form"),
- ('no-deps', 'N', "don't install dependencies"),
- ('allow-hosts=', 'H', "pattern(s) that hostnames must match"),
- ('local-snapshots-ok', 'l', "allow building eggs from local checkouts"),
- ('version', None, "print version information and exit"),
- ('no-find-links', None,
- "Don't load find-links defined in packages being installed")
- ]
- boolean_options = [
- 'zip-ok', 'multi-version', 'exclude-scripts', 'upgrade', 'always-copy',
- 'delete-conflicting', 'ignore-conflicts-at-my-risk', 'editable',
- 'no-deps', 'local-snapshots-ok', 'version'
- ]
-
- if HAS_USER_SITE:
- user_options.append(('user', None,
- "install in user site-package '%s'" % site.USER_SITE))
- boolean_options.append('user')
-
-
- negative_opt = {'always-unzip': 'zip-ok'}
- create_index = PackageIndex
-
- def initialize_options(self):
- if HAS_USER_SITE:
- whereami = os.path.abspath(__file__)
- self.user = whereami.startswith(site.USER_SITE)
- else:
- self.user = 0
-
- self.zip_ok = self.local_snapshots_ok = None
- self.install_dir = self.script_dir = self.exclude_scripts = None
- self.index_url = None
- self.find_links = None
- self.build_directory = None
- self.args = None
- self.optimize = self.record = None
- self.upgrade = self.always_copy = self.multi_version = None
- self.editable = self.no_deps = self.allow_hosts = None
- self.root = self.prefix = self.no_report = None
- self.version = None
- self.install_purelib = None # for pure module distributions
- self.install_platlib = None # non-pure (dists w/ extensions)
- self.install_headers = None # for C/C++ headers
- self.install_lib = None # set to either purelib or platlib
- self.install_scripts = None
- self.install_data = None
- self.install_base = None
- self.install_platbase = None
- if HAS_USER_SITE:
- self.install_userbase = site.USER_BASE
- self.install_usersite = site.USER_SITE
- else:
- self.install_userbase = None
- self.install_usersite = None
- self.no_find_links = None
-
- # Options not specifiable via command line
- self.package_index = None
- self.pth_file = self.always_copy_from = None
- self.delete_conflicting = None
- self.ignore_conflicts_at_my_risk = None
- self.site_dirs = None
- self.installed_projects = {}
- self.sitepy_installed = False
- # Always read easy_install options, even if we are subclassed, or have
- # an independent instance created. This ensures that defaults will
- # always come from the standard configuration file(s)' "easy_install"
- # section, even if this is a "develop" or "install" command, or some
- # other embedding.
- self._dry_run = None
- self.verbose = self.distribution.verbose
- self.distribution._set_command_options(
- self, self.distribution.get_option_dict('easy_install')
- )
-
- def delete_blockers(self, blockers):
- for filename in blockers:
- if os.path.exists(filename) or os.path.islink(filename):
- log.info("Deleting %s", filename)
- if not self.dry_run:
- if os.path.isdir(filename) and not os.path.islink(filename):
- rmtree(filename)
- else:
- os.unlink(filename)
-
- def finalize_options(self):
- if self.version:
- print 'distribute %s' % get_distribution('distribute').version
- sys.exit()
-
- py_version = sys.version.split()[0]
- prefix, exec_prefix = get_config_vars('prefix', 'exec_prefix')
-
- self.config_vars = {'dist_name': self.distribution.get_name(),
- 'dist_version': self.distribution.get_version(),
- 'dist_fullname': self.distribution.get_fullname(),
- 'py_version': py_version,
- 'py_version_short': py_version[0:3],
- 'py_version_nodot': py_version[0] + py_version[2],
- 'sys_prefix': prefix,
- 'prefix': prefix,
- 'sys_exec_prefix': exec_prefix,
- 'exec_prefix': exec_prefix,
- }
-
- if HAS_USER_SITE:
- self.config_vars['userbase'] = self.install_userbase
- self.config_vars['usersite'] = self.install_usersite
-
- # fix the install_dir if "--user" was used
- #XXX: duplicate of the code in the setup command
- if self.user and HAS_USER_SITE:
- self.create_home_path()
- if self.install_userbase is None:
- raise DistutilsPlatformError(
- "User base directory is not specified")
- self.install_base = self.install_platbase = self.install_userbase
- if os.name == 'posix':
- self.select_scheme("unix_user")
- else:
- self.select_scheme(os.name + "_user")
-
- self.expand_basedirs()
- self.expand_dirs()
-
- self._expand('install_dir','script_dir','build_directory','site_dirs')
- # If a non-default installation directory was specified, default the
- # script directory to match it.
- if self.script_dir is None:
- self.script_dir = self.install_dir
-
- if self.no_find_links is None:
- self.no_find_links = False
-
- # Let install_dir get set by install_lib command, which in turn
- # gets its info from the install command, and takes into account
- # --prefix and --home and all that other crud.
- self.set_undefined_options('install_lib',
- ('install_dir','install_dir')
- )
- # Likewise, set default script_dir from 'install_scripts.install_dir'
- self.set_undefined_options('install_scripts',
- ('install_dir', 'script_dir')
- )
-
- if self.user and self.install_purelib:
- self.install_dir = self.install_purelib
- self.script_dir = self.install_scripts
- # default --record from the install command
- self.set_undefined_options('install', ('record', 'record'))
- normpath = map(normalize_path, sys.path)
- self.all_site_dirs = get_site_dirs()
- if self.site_dirs is not None:
- site_dirs = [
- os.path.expanduser(s.strip()) for s in self.site_dirs.split(',')
- ]
- for d in site_dirs:
- if not os.path.isdir(d):
- log.warn("%s (in --site-dirs) does not exist", d)
- elif normalize_path(d) not in normpath:
- raise DistutilsOptionError(
- d+" (in --site-dirs) is not on sys.path"
- )
- else:
- self.all_site_dirs.append(normalize_path(d))
- if not self.editable: self.check_site_dir()
- self.index_url = self.index_url or "http://pypi.python.org/simple"
- self.shadow_path = self.all_site_dirs[:]
- for path_item in self.install_dir, normalize_path(self.script_dir):
- if path_item not in self.shadow_path:
- self.shadow_path.insert(0, path_item)
-
- if self.allow_hosts is not None:
- hosts = [s.strip() for s in self.allow_hosts.split(',')]
- else:
- hosts = ['*']
- if self.package_index is None:
- self.package_index = self.create_index(
- self.index_url, search_path = self.shadow_path, hosts=hosts,
- )
- self.local_index = Environment(self.shadow_path+sys.path)
-
- if self.find_links is not None:
- if isinstance(self.find_links, basestring):
- self.find_links = self.find_links.split()
- else:
- self.find_links = []
- if self.local_snapshots_ok:
- self.package_index.scan_egg_links(self.shadow_path+sys.path)
- if not self.no_find_links:
- self.package_index.add_find_links(self.find_links)
- self.set_undefined_options('install_lib', ('optimize','optimize'))
- if not isinstance(self.optimize,int):
- try:
- self.optimize = int(self.optimize)
- if not (0 <= self.optimize <= 2): raise ValueError
- except ValueError:
- raise DistutilsOptionError("--optimize must be 0, 1, or 2")
-
- if self.delete_conflicting and self.ignore_conflicts_at_my_risk:
- raise DistutilsOptionError(
- "Can't use both --delete-conflicting and "
- "--ignore-conflicts-at-my-risk at the same time"
- )
- if self.editable and not self.build_directory:
- raise DistutilsArgError(
- "Must specify a build directory (-b) when using --editable"
- )
- if not self.args:
- raise DistutilsArgError(
- "No urls, filenames, or requirements specified (see --help)")
-
- self.outputs = []
-
-
- def _expand_attrs(self, attrs):
- for attr in attrs:
- val = getattr(self, attr)
- if val is not None:
- if os.name == 'posix' or os.name == 'nt':
- val = os.path.expanduser(val)
- val = subst_vars(val, self.config_vars)
- setattr(self, attr, val)
-
- def expand_basedirs(self):
- """Calls `os.path.expanduser` on install_base, install_platbase and
- root."""
- self._expand_attrs(['install_base', 'install_platbase', 'root'])
-
- def expand_dirs(self):
- """Calls `os.path.expanduser` on install dirs."""
- self._expand_attrs(['install_purelib', 'install_platlib',
- 'install_lib', 'install_headers',
- 'install_scripts', 'install_data',])
-
- def run(self):
- if self.verbose != self.distribution.verbose:
- log.set_verbosity(self.verbose)
- try:
- for spec in self.args:
- self.easy_install(spec, not self.no_deps)
- if self.record:
- outputs = self.outputs
- if self.root: # strip any package prefix
- root_len = len(self.root)
- for counter in xrange(len(outputs)):
- outputs[counter] = outputs[counter][root_len:]
- from distutils import file_util
- self.execute(
- file_util.write_file, (self.record, outputs),
- "writing list of installed files to '%s'" %
- self.record
- )
- self.warn_deprecated_options()
- finally:
- log.set_verbosity(self.distribution.verbose)
-
- def pseudo_tempname(self):
- """Return a pseudo-tempname base in the install directory.
- This code is intentionally naive; if a malicious party can write to
- the target directory you're already in deep doodoo.
- """
- try:
- pid = os.getpid()
- except:
- pid = random.randint(0,sys.maxint)
- return os.path.join(self.install_dir, "test-easy-install-%s" % pid)
-
- def warn_deprecated_options(self):
- if self.delete_conflicting or self.ignore_conflicts_at_my_risk:
- log.warn(
- "Note: The -D, --delete-conflicting and"
- " --ignore-conflicts-at-my-risk no longer have any purpose"
- " and should not be used."
- )
-
- def check_site_dir(self):
- """Verify that self.install_dir is .pth-capable dir, if needed"""
- instdir = normalize_path(self.install_dir)
- pth_file = os.path.join(instdir,'easy-install.pth')
-
- # Is it a configured, PYTHONPATH, implicit, or explicit site dir?
- is_site_dir = instdir in self.all_site_dirs
-
- if not is_site_dir:
- # No? Then directly test whether it does .pth file processing
- is_site_dir = self.check_pth_processing()
- else:
- # make sure we can write to target dir
- testfile = self.pseudo_tempname()+'.write-test'
- test_exists = os.path.exists(testfile)
- try:
- if test_exists: os.unlink(testfile)
- open(testfile,'w').close()
- os.unlink(testfile)
- except (OSError,IOError):
- self.cant_write_to_target()
-
- if not is_site_dir and not self.multi_version:
- # Can't install non-multi to non-site dir
- raise DistutilsError(self.no_default_version_msg())
-
- if is_site_dir:
- if self.pth_file is None:
- self.pth_file = PthDistributions(pth_file, self.all_site_dirs)
- else:
- self.pth_file = None
-
- PYTHONPATH = os.environ.get('PYTHONPATH','').split(os.pathsep)
- if instdir not in map(normalize_path, filter(None,PYTHONPATH)):
- # only PYTHONPATH dirs need a site.py, so pretend it's there
- self.sitepy_installed = True
- elif self.multi_version and not os.path.exists(pth_file):
- self.sitepy_installed = True # don't need site.py in this case
- self.pth_file = None # and don't create a .pth file
- self.install_dir = instdir
-
- def cant_write_to_target(self):
- msg = """can't create or remove files in install directory
-
-The following error occurred while trying to add or remove files in the
-installation directory:
-
- %s
-
-The installation directory you specified (via --install-dir, --prefix, or
-the distutils default setting) was:
-
- %s
-""" % (sys.exc_info()[1], self.install_dir,)
-
- if not os.path.exists(self.install_dir):
- msg += """
-This directory does not currently exist. Please create it and try again, or
-choose a different installation directory (using the -d or --install-dir
-option).
-"""
- else:
- msg += """
-Perhaps your account does not have write access to this directory? If the
-installation directory is a system-owned directory, you may need to sign in
-as the administrator or "root" account. If you do not have administrative
-access to this machine, you may wish to choose a different installation
-directory, preferably one that is listed in your PYTHONPATH environment
-variable.
-
-For information on other options, you may wish to consult the
-documentation at:
-
- http://packages.python.org/distribute/easy_install.html
-
-Please make the appropriate changes for your system and try again.
-"""
- raise DistutilsError(msg)
-
-
-
-
- def check_pth_processing(self):
- """Empirically verify whether .pth files are supported in inst. dir"""
- instdir = self.install_dir
- log.info("Checking .pth file support in %s", instdir)
- pth_file = self.pseudo_tempname()+".pth"
- ok_file = pth_file+'.ok'
- ok_exists = os.path.exists(ok_file)
- try:
- if ok_exists: os.unlink(ok_file)
- dirname = os.path.dirname(ok_file)
- if not os.path.exists(dirname):
- os.makedirs(dirname)
- f = open(pth_file,'w')
- except (OSError,IOError):
- self.cant_write_to_target()
- else:
- try:
- f.write("import os;open(%r,'w').write('OK')\n" % (ok_file,))
- f.close(); f=None
- executable = sys.executable
- if os.name=='nt':
- dirname,basename = os.path.split(executable)
- alt = os.path.join(dirname,'pythonw.exe')
- if basename.lower()=='python.exe' and os.path.exists(alt):
- # use pythonw.exe to avoid opening a console window
- executable = alt
-
- from distutils.spawn import spawn
- spawn([executable,'-E','-c','pass'],0)
-
- if os.path.exists(ok_file):
- log.info(
- "TEST PASSED: %s appears to support .pth files",
- instdir
- )
- return True
- finally:
- if f: f.close()
- if os.path.exists(ok_file): os.unlink(ok_file)
- if os.path.exists(pth_file): os.unlink(pth_file)
- if not self.multi_version:
- log.warn("TEST FAILED: %s does NOT support .pth files", instdir)
- return False
-
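# The empirical test above can be reproduced standalone: drop a one-line .pth
# into the candidate directory, start a fresh interpreter, and check whether
# the line ran at startup. A sketch under the same assumption as the original
# (the child interpreter only scans `candidate` if it treats it as a site dir):
import os, sys, subprocess

def dir_processes_pth(candidate):
    ok = os.path.join(candidate, 'probe.ok')
    pth = os.path.join(candidate, 'probe.pth')
    f = open(pth, 'w')
    f.write("import os;open(%r,'w').write('OK')\n" % ok)
    f.close()
    try:
        # -E ignores PYTHONPATH, mirroring the spawn() call above
        subprocess.call([sys.executable, '-E', '-c', 'pass'])
        return os.path.exists(ok)
    finally:
        for p in (ok, pth):
            if os.path.exists(p):
                os.unlink(p)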
- def install_egg_scripts(self, dist):
- """Write all the scripts for `dist`, unless scripts are excluded"""
- if not self.exclude_scripts and dist.metadata_isdir('scripts'):
- for script_name in dist.metadata_listdir('scripts'):
- self.install_script(
- dist, script_name,
- dist.get_metadata('scripts/'+script_name)
- )
- self.install_wrapper_scripts(dist)
-
- def add_output(self, path):
- if os.path.isdir(path):
- for base, dirs, files in os.walk(path):
- for filename in files:
- self.outputs.append(os.path.join(base,filename))
- else:
- self.outputs.append(path)
-
- def not_editable(self, spec):
- if self.editable:
- raise DistutilsArgError(
- "Invalid argument %r: you can't use filenames or URLs "
- "with --editable (except via the --find-links option)."
- % (spec,)
- )
-
- def check_editable(self,spec):
- if not self.editable:
- return
-
- if os.path.exists(os.path.join(self.build_directory, spec.key)):
- raise DistutilsArgError(
- "%r already exists in %s; can't do a checkout there" %
- (spec.key, self.build_directory)
- )
-
-
-
-
-
-
- def easy_install(self, spec, deps=False):
- tmpdir = tempfile.mkdtemp(prefix="easy_install-")
- download = None
- if not self.editable: self.install_site_py()
-
- try:
- if not isinstance(spec,Requirement):
- if URL_SCHEME(spec):
- # It's a url, download it to tmpdir and process
- self.not_editable(spec)
- download = self.package_index.download(spec, tmpdir)
- return self.install_item(None, download, tmpdir, deps, True)
-
- elif os.path.exists(spec):
- # Existing file or directory, just process it directly
- self.not_editable(spec)
- return self.install_item(None, spec, tmpdir, deps, True)
- else:
- spec = parse_requirement_arg(spec)
-
- self.check_editable(spec)
- dist = self.package_index.fetch_distribution(
- spec, tmpdir, self.upgrade, self.editable, not self.always_copy,
- self.local_index
- )
-
- if dist is None:
- msg = "Could not find suitable distribution for %r" % spec
- if self.always_copy:
- msg+=" (--always-copy skips system and development eggs)"
- raise DistutilsError(msg)
- elif dist.precedence==DEVELOP_DIST:
- # .egg-info dists don't need installing, just process deps
- self.process_distribution(spec, dist, deps, "Using")
- return dist
- else:
- return self.install_item(spec, dist.location, tmpdir, deps)
-
- finally:
- if os.path.exists(tmpdir):
- rmtree(tmpdir)
-
- def install_item(self, spec, download, tmpdir, deps, install_needed=False):
-
- # Installation is also needed if the file is in tmpdir or is not an egg
- install_needed = install_needed or self.always_copy
- install_needed = install_needed or os.path.dirname(download) == tmpdir
- install_needed = install_needed or not download.endswith('.egg')
- install_needed = install_needed or (
- self.always_copy_from is not None and
- os.path.dirname(normalize_path(download)) ==
- normalize_path(self.always_copy_from)
- )
-
- if spec and not install_needed:
- # at this point, we know it's a local .egg, we just don't know if
- # it's already installed.
- for dist in self.local_index[spec.project_name]:
- if dist.location==download:
- break
- else:
- install_needed = True # it's not in the local index
-
- log.info("Processing %s", os.path.basename(download))
-
- if install_needed:
- dists = self.install_eggs(spec, download, tmpdir)
- for dist in dists:
- self.process_distribution(spec, dist, deps)
- else:
- dists = [self.check_conflicts(self.egg_distribution(download))]
- self.process_distribution(spec, dists[0], deps, "Using")
-
- if spec is not None:
- for dist in dists:
- if dist in spec:
- return dist
-
-
-
- def select_scheme(self, name):
- """Sets the install directories by applying the install schemes."""
- # it's the caller's problem if they supply a bad name!
- scheme = INSTALL_SCHEMES[name]
- for key in SCHEME_KEYS:
- attrname = 'install_' + key
- if getattr(self, attrname) is None:
- setattr(self, attrname, scheme[key])
-
-
-
-
- def process_distribution(self, requirement, dist, deps=True, *info):
- self.update_pth(dist)
- self.package_index.add(dist)
- self.local_index.add(dist)
- if not self.editable:
- self.install_egg_scripts(dist)
- self.installed_projects[dist.key] = dist
- log.info(self.installation_report(requirement, dist, *info))
- if (dist.has_metadata('dependency_links.txt') and
- not self.no_find_links):
- self.package_index.add_find_links(
- dist.get_metadata_lines('dependency_links.txt')
- )
- if not deps and not self.always_copy:
- return
- elif requirement is not None and dist.key != requirement.key:
- log.warn("Skipping dependencies for %s", dist)
- return # XXX this is not the distribution we were looking for
- elif requirement is None or dist not in requirement:
- # if we wound up with a different version, resolve what we've got
- distreq = dist.as_requirement()
- requirement = requirement or distreq
- requirement = Requirement(
- distreq.project_name, distreq.specs, requirement.extras
- )
- log.info("Processing dependencies for %s", requirement)
- try:
- distros = WorkingSet([]).resolve(
- [requirement], self.local_index, self.easy_install
- )
- except DistributionNotFound, e:
- raise DistutilsError(
- "Could not find required distribution %s" % e.args
- )
- except VersionConflict, e:
- raise DistutilsError(
- "Installed distribution %s conflicts with requirement %s"
- % e.args
- )
- if self.always_copy or self.always_copy_from:
- # Force all the relevant distros to be copied or activated
- for dist in distros:
- if dist.key not in self.installed_projects:
- self.easy_install(dist.as_requirement())
- log.info("Finished processing dependencies for %s", requirement)
-
- def should_unzip(self, dist):
- if self.zip_ok is not None:
- return not self.zip_ok
- if dist.has_metadata('not-zip-safe'):
- return True
- if not dist.has_metadata('zip-safe'):
- return True
- return True
-
- def maybe_move(self, spec, dist_filename, setup_base):
- dst = os.path.join(self.build_directory, spec.key)
- if os.path.exists(dst):
- log.warn(
- "%r already exists in %s; build directory %s will not be kept",
- spec.key, self.build_directory, setup_base
- )
- return setup_base
- if os.path.isdir(dist_filename):
- setup_base = dist_filename
- else:
- if os.path.dirname(dist_filename)==setup_base:
- os.unlink(dist_filename) # get it out of the tmp dir
- contents = os.listdir(setup_base)
- if len(contents)==1:
- dist_filename = os.path.join(setup_base,contents[0])
- if os.path.isdir(dist_filename):
- # if the only thing there is a directory, move it instead
- setup_base = dist_filename
- ensure_directory(dst); shutil.move(setup_base, dst)
- return dst
-
- def install_wrapper_scripts(self, dist):
- if not self.exclude_scripts:
- for args in get_script_args(dist):
- self.write_script(*args)
-
-
-
- def install_script(self, dist, script_name, script_text, dev_path=None):
- """Generate a legacy script wrapper and install it"""
- spec = str(dist.as_requirement())
- is_script = is_python_script(script_text, script_name)
-
- if is_script and dev_path:
- script_text = get_script_header(script_text) + (
- "# EASY-INSTALL-DEV-SCRIPT: %(spec)r,%(script_name)r\n"
- "__requires__ = %(spec)r\n"
- "from pkg_resources import require; require(%(spec)r)\n"
- "del require\n"
- "__file__ = %(dev_path)r\n"
- "execfile(__file__)\n"
- ) % locals()
- elif is_script:
- script_text = get_script_header(script_text) + (
- "# EASY-INSTALL-SCRIPT: %(spec)r,%(script_name)r\n"
- "__requires__ = %(spec)r\n"
- "import pkg_resources\n"
- "pkg_resources.run_script(%(spec)r, %(script_name)r)\n"
- ) % locals()
- self.write_script(script_name, _to_ascii(script_text), 'b')
-
- def write_script(self, script_name, contents, mode="t", blockers=()):
- """Write an executable file to the scripts directory"""
- self.delete_blockers( # clean up old .py/.pyw w/o a script
- [os.path.join(self.script_dir,x) for x in blockers])
- log.info("Installing %s script to %s", script_name, self.script_dir)
- target = os.path.join(self.script_dir, script_name)
- self.add_output(target)
-
- if not self.dry_run:
- ensure_directory(target)
- f = open(target,"w"+mode)
- f.write(contents)
- f.close()
- chmod(target,0755)
-
-
-
-
- def install_eggs(self, spec, dist_filename, tmpdir):
- # .egg dirs or files are already built, so just return them
- if dist_filename.lower().endswith('.egg'):
- return [self.install_egg(dist_filename, tmpdir)]
- elif dist_filename.lower().endswith('.exe'):
- return [self.install_exe(dist_filename, tmpdir)]
-
- # Anything else, try to extract and build
- setup_base = tmpdir
- if os.path.isfile(dist_filename) and not dist_filename.endswith('.py'):
- unpack_archive(dist_filename, tmpdir, self.unpack_progress)
- elif os.path.isdir(dist_filename):
- setup_base = os.path.abspath(dist_filename)
-
- if (setup_base.startswith(tmpdir) # something we downloaded
- and self.build_directory and spec is not None
- ):
- setup_base = self.maybe_move(spec, dist_filename, setup_base)
-
- # Find the setup.py file
- setup_script = os.path.join(setup_base, 'setup.py')
-
- if not os.path.exists(setup_script):
- setups = glob(os.path.join(setup_base, '*', 'setup.py'))
- if not setups:
- raise DistutilsError(
- "Couldn't find a setup script in %s" % os.path.abspath(dist_filename)
- )
- if len(setups)>1:
- raise DistutilsError(
- "Multiple setup scripts in %s" % os.path.abspath(dist_filename)
- )
- setup_script = setups[0]
-
- # Now run it, and return the result
- if self.editable:
- log.info(self.report_editable(spec, setup_script))
- return []
- else:
- return self.build_and_install(setup_script, setup_base)
-
- def egg_distribution(self, egg_path):
- if os.path.isdir(egg_path):
- metadata = PathMetadata(egg_path,os.path.join(egg_path,'EGG-INFO'))
- else:
- metadata = EggMetadata(zipimport.zipimporter(egg_path))
- return Distribution.from_filename(egg_path,metadata=metadata)
-
- def install_egg(self, egg_path, tmpdir):
- destination = os.path.join(self.install_dir,os.path.basename(egg_path))
- destination = os.path.abspath(destination)
- if not self.dry_run:
- ensure_directory(destination)
-
- dist = self.egg_distribution(egg_path)
- self.check_conflicts(dist)
- if not samefile(egg_path, destination):
- if os.path.isdir(destination) and not os.path.islink(destination):
- dir_util.remove_tree(destination, dry_run=self.dry_run)
- elif os.path.exists(destination):
- self.execute(os.unlink,(destination,),"Removing "+destination)
- uncache_zipdir(destination)
- if os.path.isdir(egg_path):
- if egg_path.startswith(tmpdir):
- f,m = shutil.move, "Moving"
- else:
- f,m = shutil.copytree, "Copying"
- elif self.should_unzip(dist):
- self.mkpath(destination)
- f,m = self.unpack_and_compile, "Extracting"
- elif egg_path.startswith(tmpdir):
- f,m = shutil.move, "Moving"
- else:
- f,m = shutil.copy2, "Copying"
-
- self.execute(f, (egg_path, destination),
- (m+" %s to %s") %
- (os.path.basename(egg_path),os.path.dirname(destination)))
-
- self.add_output(destination)
- return self.egg_distribution(destination)
-
- def install_exe(self, dist_filename, tmpdir):
- # See if it's valid, get data
- cfg = extract_wininst_cfg(dist_filename)
- if cfg is None:
- raise DistutilsError(
- "%s is not a valid distutils Windows .exe" % dist_filename
- )
- # Create a dummy distribution object until we build the real distro
- dist = Distribution(None,
- project_name=cfg.get('metadata','name'),
- version=cfg.get('metadata','version'), platform="win32"
- )
-
- # Convert the .exe to an unpacked egg
- egg_path = dist.location = os.path.join(tmpdir, dist.egg_name()+'.egg')
- egg_tmp = egg_path+'.tmp'
- egg_info = os.path.join(egg_tmp, 'EGG-INFO')
- pkg_inf = os.path.join(egg_info, 'PKG-INFO')
- ensure_directory(pkg_inf) # make sure EGG-INFO dir exists
- dist._provider = PathMetadata(egg_tmp, egg_info) # XXX
- self.exe_to_egg(dist_filename, egg_tmp)
-
- # Write EGG-INFO/PKG-INFO
- if not os.path.exists(pkg_inf):
- f = open(pkg_inf,'w')
- f.write('Metadata-Version: 1.0\n')
- for k,v in cfg.items('metadata'):
- if k != 'target_version':
- f.write('%s: %s\n' % (k.replace('_','-').title(), v))
- f.close()
- script_dir = os.path.join(egg_info,'scripts')
- self.delete_blockers( # delete entry-point scripts to avoid duping
- [os.path.join(script_dir,args[0]) for args in get_script_args(dist)]
- )
- # Build .egg file from tmpdir
- bdist_egg.make_zipfile(
- egg_path, egg_tmp, verbose=self.verbose, dry_run=self.dry_run
- )
- # install the .egg
- return self.install_egg(egg_path, tmpdir)
-
- def exe_to_egg(self, dist_filename, egg_tmp):
- """Extract a bdist_wininst to the directories an egg would use"""
- # Check for .pth file and set up prefix translations
- prefixes = get_exe_prefixes(dist_filename)
- to_compile = []
- native_libs = []
- top_level = {}
- def process(src,dst):
- s = src.lower()
- for old,new in prefixes:
- if s.startswith(old):
- src = new+src[len(old):]
- parts = src.split('/')
- dst = os.path.join(egg_tmp, *parts)
- dl = dst.lower()
- if dl.endswith('.pyd') or dl.endswith('.dll'):
- parts[-1] = bdist_egg.strip_module(parts[-1])
- top_level[os.path.splitext(parts[0])[0]] = 1
- native_libs.append(src)
- elif dl.endswith('.py') and old!='SCRIPTS/':
- top_level[os.path.splitext(parts[0])[0]] = 1
- to_compile.append(dst)
- return dst
- if not src.endswith('.pth'):
- log.warn("WARNING: can't process %s", src)
- return None
- # extract, tracking .pyd/.dll->native_libs and .py -> to_compile
- unpack_archive(dist_filename, egg_tmp, process)
- stubs = []
- for res in native_libs:
- if res.lower().endswith('.pyd'): # create stubs for .pyd's
- parts = res.split('/')
- resource = parts[-1]
- parts[-1] = bdist_egg.strip_module(parts[-1])+'.py'
- pyfile = os.path.join(egg_tmp, *parts)
- to_compile.append(pyfile); stubs.append(pyfile)
- bdist_egg.write_stub(resource, pyfile)
- self.byte_compile(to_compile) # compile .py's
- bdist_egg.write_safety_flag(os.path.join(egg_tmp,'EGG-INFO'),
- bdist_egg.analyze_egg(egg_tmp, stubs)) # write zip-safety flag
-
- for name in 'top_level','native_libs':
- if locals()[name]:
- txt = os.path.join(egg_tmp, 'EGG-INFO', name+'.txt')
- if not os.path.exists(txt):
- f = open(txt,'w')
- f.write('\n'.join(locals()[name])+'\n')
- f.close()
-
- def check_conflicts(self, dist):
- """Verify that there are no conflicting "old-style" packages"""
-
- return dist # XXX temporarily disable until new strategy is stable
- from imp import find_module, get_suffixes
- from glob import glob
-
- blockers = []
- names = dict.fromkeys(dist._get_metadata('top_level.txt')) # XXX private attr
-
- exts = {'.pyc':1, '.pyo':1} # get_suffixes() might leave one out
- for ext,mode,typ in get_suffixes():
- exts[ext] = 1
-
- for path,files in expand_paths([self.install_dir]+self.all_site_dirs):
- for filename in files:
- base,ext = os.path.splitext(filename)
- if base in names:
- if not ext:
- # no extension, check for package
- try:
- f, filename, descr = find_module(base, [path])
- except ImportError:
- continue
- else:
- if f: f.close()
- if filename not in blockers:
- blockers.append(filename)
- elif ext in exts and base!='site': # XXX ugh
- blockers.append(os.path.join(path,filename))
- if blockers:
- self.found_conflicts(dist, blockers)
-
- return dist
-
- def found_conflicts(self, dist, blockers):
- if self.delete_conflicting:
- log.warn("Attempting to delete conflicting packages:")
- return self.delete_blockers(blockers)
-
- msg = """\
--------------------------------------------------------------------------
-CONFLICT WARNING:
-
-The following modules or packages have the same names as modules or
-packages being installed, and will be *before* the installed packages in
-Python's search path. You MUST remove all of the relevant files and
-directories before you will be able to use the package(s) you are
-installing:
-
- %s
-
-""" % '\n '.join(blockers)
-
- if self.ignore_conflicts_at_my_risk:
- msg += """\
-(Note: you can run EasyInstall on '%s' with the
---delete-conflicting option to attempt deletion of the above files
-and/or directories.)
-""" % dist.project_name
- else:
- msg += """\
-Note: you can attempt this installation again with EasyInstall, and use
-either the --delete-conflicting (-D) option or the
---ignore-conflicts-at-my-risk option, to either delete the above files
-and directories, or to ignore the conflicts, respectively. Note that if
-you ignore the conflicts, the installed package(s) may not work.
-"""
- msg += """\
--------------------------------------------------------------------------
-"""
- sys.stderr.write(msg)
- sys.stderr.flush()
- if not self.ignore_conflicts_at_my_risk:
- raise DistutilsError("Installation aborted due to conflicts")
-
- def installation_report(self, req, dist, what="Installed"):
- """Helpful installation message for display to package users"""
- msg = "\n%(what)s %(eggloc)s%(extras)s"
- if self.multi_version and not self.no_report:
- msg += """
-
-Because this distribution was installed --multi-version, before you can
-import modules from this package in an application, you will need to
-'import pkg_resources' and then use a 'require()' call similar to one of
-these examples, in order to select the desired version:
-
- pkg_resources.require("%(name)s") # latest installed version
- pkg_resources.require("%(name)s==%(version)s") # this exact version
- pkg_resources.require("%(name)s>=%(version)s") # this version or higher
-"""
- if self.install_dir not in map(normalize_path,sys.path):
- msg += """
-
-Note also that the installation directory must be on sys.path at runtime for
-this to work. (e.g. by being the application's script directory, by being on
-PYTHONPATH, or by being added to sys.path by your code.)
-"""
- eggloc = dist.location
- name = dist.project_name
- version = dist.version
- extras = '' # TODO: self.report_extras(req, dist)
- return msg % locals()
-
- def report_editable(self, spec, setup_script):
- dirname = os.path.dirname(setup_script)
- python = sys.executable
- return """\nExtracted editable version of %(spec)s to %(dirname)s
-
-If it uses setuptools in its setup script, you can activate it in
-"development" mode by going to that directory and running::
-
- %(python)s setup.py develop
-
-See the setuptools documentation for the "develop" command for more info.
-""" % locals()
-
- def run_setup(self, setup_script, setup_base, args):
- sys.modules.setdefault('distutils.command.bdist_egg', bdist_egg)
- sys.modules.setdefault('distutils.command.egg_info', egg_info)
-
- args = list(args)
- if self.verbose>2:
- v = 'v' * (self.verbose - 1)
- args.insert(0,'-'+v)
- elif self.verbose<2:
- args.insert(0,'-q')
- if self.dry_run:
- args.insert(0,'-n')
- log.info(
- "Running %s %s", setup_script[len(setup_base)+1:], ' '.join(args)
- )
- try:
- run_setup(setup_script, args)
- except SystemExit, v:
- raise DistutilsError("Setup script exited with %s" % (v.args[0],))
-
- def build_and_install(self, setup_script, setup_base):
- args = ['bdist_egg', '--dist-dir']
- dist_dir = tempfile.mkdtemp(
- prefix='egg-dist-tmp-', dir=os.path.dirname(setup_script)
- )
- try:
- args.append(dist_dir)
- self.run_setup(setup_script, setup_base, args)
- all_eggs = Environment([dist_dir])
- eggs = []
- for key in all_eggs:
- for dist in all_eggs[key]:
- eggs.append(self.install_egg(dist.location, setup_base))
- if not eggs and not self.dry_run:
- log.warn("No eggs found in %s (setup script problem?)",
- dist_dir)
- return eggs
- finally:
- rmtree(dist_dir)
- log.set_verbosity(self.verbose) # restore our log verbosity
-
- def update_pth(self,dist):
- if self.pth_file is None:
- return
-
- for d in self.pth_file[dist.key]: # drop old entries
- if self.multi_version or d.location != dist.location:
- log.info("Removing %s from easy-install.pth file", d)
- self.pth_file.remove(d)
- if d.location in self.shadow_path:
- self.shadow_path.remove(d.location)
-
- if not self.multi_version:
- if dist.location in self.pth_file.paths:
- log.info(
- "%s is already the active version in easy-install.pth",
- dist
- )
- else:
- log.info("Adding %s to easy-install.pth file", dist)
- self.pth_file.add(dist) # add new entry
- if dist.location not in self.shadow_path:
- self.shadow_path.append(dist.location)
-
- if not self.dry_run:
-
- self.pth_file.save()
- if dist.key=='distribute':
- # Ensure that setuptools itself never becomes unavailable!
- # XXX should this check for latest version?
- filename = os.path.join(self.install_dir,'setuptools.pth')
- if os.path.islink(filename): os.unlink(filename)
- f = open(filename, 'wt')
- f.write(self.pth_file.make_relative(dist.location)+'\n')
- f.close()
-
- def unpack_progress(self, src, dst):
- # Progress filter for unpacking
- log.debug("Unpacking %s to %s", src, dst)
- return dst # only unpack-and-compile skips files for dry run
-
- def unpack_and_compile(self, egg_path, destination):
- to_compile = []; to_chmod = []
-
- def pf(src,dst):
- if dst.endswith('.py') and not src.startswith('EGG-INFO/'):
- to_compile.append(dst)
- to_chmod.append(dst)
- elif dst.endswith('.dll') or dst.endswith('.so'):
- to_chmod.append(dst)
- self.unpack_progress(src,dst)
- return not self.dry_run and dst or None
-
- unpack_archive(egg_path, destination, pf)
- self.byte_compile(to_compile)
- if not self.dry_run:
- for f in to_chmod:
- mode = ((os.stat(f)[stat.ST_MODE]) | 0555) & 07755
- chmod(f, mode)
-
- def byte_compile(self, to_compile):
- if _dont_write_bytecode:
- self.warn('byte-compiling is disabled, skipping.')
- return
-
- from distutils.util import byte_compile
- try:
- # try to make the byte compile messages quieter
- log.set_verbosity(self.verbose - 1)
-
- byte_compile(to_compile, optimize=0, force=1, dry_run=self.dry_run)
- if self.optimize:
- byte_compile(
- to_compile, optimize=self.optimize, force=1,
- dry_run=self.dry_run
- )
- finally:
- log.set_verbosity(self.verbose) # restore original verbosity
-
-
-
-
-
-
-
-
- def no_default_version_msg(self):
- return """bad install directory or PYTHONPATH
-
-You are attempting to install a package to a directory that is not
-on PYTHONPATH and which Python does not read ".pth" files from. The
-installation directory you specified (via --install-dir, --prefix, or
-the distutils default setting) was:
-
- %s
-
-and your PYTHONPATH environment variable currently contains:
-
- %r
-
-Here are some of your options for correcting the problem:
-
-* You can choose a different installation directory, i.e., one that is
- on PYTHONPATH or supports .pth files
-
-* You can add the installation directory to the PYTHONPATH environment
- variable. (It must then also be on PYTHONPATH whenever you run
- Python and want to use the package(s) you are installing.)
-
-* You can set up the installation directory to support ".pth" files by
- using one of the approaches described here:
-
- http://packages.python.org/distribute/easy_install.html#custom-installation-locations
-
-Please make the appropriate changes for your system and try again.""" % (
- self.install_dir, os.environ.get('PYTHONPATH','')
- )
-
-
-
-
-
-
-
-
-
-
- def install_site_py(self):
- """Make sure there's a site.py in the target dir, if needed"""
-
- if self.sitepy_installed:
- return # already did it, or don't need to
-
- sitepy = os.path.join(self.install_dir, "site.py")
- source = resource_string(Requirement.parse("distribute"), "site.py")
- current = ""
-
- if os.path.exists(sitepy):
- log.debug("Checking existing site.py in %s", self.install_dir)
- f = open(sitepy,'rb')
- current = f.read()
- # we want str, not bytes
- if sys.version_info >= (3,):
- current = current.decode()
-
- f.close()
- if not current.startswith('def __boot():'):
- raise DistutilsError(
- "%s is not a setuptools-generated site.py; please"
- " remove it." % sitepy
- )
-
- if current != source:
- log.info("Creating %s", sitepy)
- if not self.dry_run:
- ensure_directory(sitepy)
- f = open(sitepy,'wb')
- f.write(source)
- f.close()
- self.byte_compile([sitepy])
-
- self.sitepy_installed = True
-
-
-
-
- def create_home_path(self):
- """Create directories under ~."""
- if not self.user:
- return
- home = convert_path(os.path.expanduser("~"))
- for name, path in self.config_vars.iteritems():
- if path.startswith(home) and not os.path.isdir(path):
- self.debug_print("os.makedirs('%s', 0700)" % path)
- os.makedirs(path, 0700)
-
-
-
-
-
-
-
- INSTALL_SCHEMES = dict(
- posix = dict(
- install_dir = '$base/lib/python$py_version_short/site-packages',
- script_dir = '$base/bin',
- ),
- )
-
- DEFAULT_SCHEME = dict(
- install_dir = '$base/Lib/site-packages',
- script_dir = '$base/Scripts',
- )
-
- def _expand(self, *attrs):
- config_vars = self.get_finalized_command('install').config_vars
-
- if self.prefix:
- # Set default install_dir/scripts from --prefix
- config_vars = config_vars.copy()
- config_vars['base'] = self.prefix
- scheme = self.INSTALL_SCHEMES.get(os.name,self.DEFAULT_SCHEME)
- for attr,val in scheme.items():
- if getattr(self,attr,None) is None:
- setattr(self,attr,val)
-
- from distutils.util import subst_vars
- for attr in attrs:
- val = getattr(self, attr)
- if val is not None:
- val = subst_vars(val, config_vars)
- if os.name == 'posix':
- val = os.path.expanduser(val)
- setattr(self, attr, val)
-
-
-
-
-
-
-
-
-
-def get_site_dirs():
- # return a list of 'site' dirs
- sitedirs = filter(None,os.environ.get('PYTHONPATH','').split(os.pathsep))
- prefixes = [sys.prefix]
- if sys.exec_prefix != sys.prefix:
- prefixes.append(sys.exec_prefix)
- for prefix in prefixes:
- if prefix:
- if sys.platform in ('os2emx', 'riscos'):
- sitedirs.append(os.path.join(prefix, "Lib", "site-packages"))
- elif os.sep == '/':
- sitedirs.extend([os.path.join(prefix,
- "lib",
- "python" + sys.version[:3],
- "site-packages"),
- os.path.join(prefix, "lib", "site-python")])
- else:
- sitedirs.extend(
- [prefix, os.path.join(prefix, "lib", "site-packages")]
- )
- if sys.platform == 'darwin':
- # for framework builds *only* we add the standard Apple
- # locations. Currently only per-user, but /Library and
- # /Network/Library could be added too
- if 'Python.framework' in prefix:
- home = os.environ.get('HOME')
- if home:
- sitedirs.append(
- os.path.join(home,
- 'Library',
- 'Python',
- sys.version[:3],
- 'site-packages'))
- for plat_specific in (0,1):
- site_lib = get_python_lib(plat_specific)
- if site_lib not in sitedirs: sitedirs.append(site_lib)
-
- if HAS_USER_SITE:
- sitedirs.append(site.USER_SITE)
-
- sitedirs = map(normalize_path, sitedirs)
-
- return sitedirs
-
-
-def expand_paths(inputs):
- """Yield sys.path directories that might contain "old-style" packages"""
-
- seen = {}
-
- for dirname in inputs:
- dirname = normalize_path(dirname)
- if dirname in seen:
- continue
-
- seen[dirname] = 1
- if not os.path.isdir(dirname):
- continue
-
- files = os.listdir(dirname)
- yield dirname, files
-
- for name in files:
- if not name.endswith('.pth'):
- # We only care about the .pth files
- continue
- if name in ('easy-install.pth','setuptools.pth'):
- # Ignore .pth files that we control
- continue
-
- # Read the .pth file
- f = open(os.path.join(dirname,name))
- lines = list(yield_lines(f))
- f.close()
-
- # Yield existing non-dupe, non-import directory lines from it
- for line in lines:
- if not line.startswith("import"):
- line = normalize_path(line.rstrip())
- if line not in seen:
- seen[line] = 1
- if not os.path.isdir(line):
- continue
- yield line, os.listdir(line)
-
-
-def extract_wininst_cfg(dist_filename):
- """Extract configuration data from a bdist_wininst .exe
-
- Returns a ConfigParser.RawConfigParser, or None
- """
- f = open(dist_filename,'rb')
- try:
- endrec = zipfile._EndRecData(f)
- if endrec is None:
- return None
-
- prepended = (endrec[9] - endrec[5]) - endrec[6]
- if prepended < 12: # no wininst data here
- return None
- f.seek(prepended-12)
-
- import struct, StringIO, ConfigParser
- tag, cfglen, bmlen = struct.unpack("<iii",f.read(12))
- if tag not in (0x1234567A, 0x1234567B):
- return None # not a valid tag
-
- f.seek(prepended-(12+cfglen))
- cfg = ConfigParser.RawConfigParser({'version':'','target_version':''})
- try:
- cfg.readfp(StringIO.StringIO(f.read(cfglen).split(chr(0),1)[0]))
- except ConfigParser.Error:
- return None
- if not cfg.has_section('metadata') or not cfg.has_section('Setup'):
- return None
- return cfg
-
- finally:
- f.close()
-
-
-
-
-
-
-
-
-def get_exe_prefixes(exe_filename):
- """Get exe->egg path translations for a given .exe file"""
-
- prefixes = [
- ('PURELIB/', ''), ('PLATLIB/pywin32_system32', ''),
- ('PLATLIB/', ''),
- ('SCRIPTS/', 'EGG-INFO/scripts/')
- ]
- z = zipfile.ZipFile(exe_filename)
- try:
- for info in z.infolist():
- name = info.filename
- parts = name.split('/')
- if len(parts)==3 and parts[2]=='PKG-INFO':
- if parts[1].endswith('.egg-info'):
- prefixes.insert(0,('/'.join(parts[:2]), 'EGG-INFO/'))
- break
- if len(parts) != 2 or not name.endswith('.pth'):
- continue
- if name.endswith('-nspkg.pth'):
- continue
- if parts[0].upper() in ('PURELIB','PLATLIB'):
- for pth in yield_lines(z.read(name)):
- pth = pth.strip().replace('\\','/')
- if not pth.startswith('import'):
- prefixes.append((('%s/%s/' % (parts[0],pth)), ''))
- finally:
- z.close()
- prefixes = [(x.lower(),y) for x, y in prefixes]
- prefixes.sort(); prefixes.reverse()
- return prefixes
-
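# How the translation table above is consumed: entries are lowercased and the
# sort-then-reverse makes the longest prefix win. This helper is a simplified
# sketch mirroring the process() callback in exe_to_egg() (the original keeps
# scanning the table; first-match is shown here for brevity), not distribute API:
def apply_prefixes(name, prefixes):
    s = name.lower()
    for old, new in prefixes:
        if s.startswith(old):      # prefixes are already lowercased
            return new + name[len(old):]
    return name

# apply_prefixes('SCRIPTS/runme.py', [('scripts/', 'EGG-INFO/scripts/')])
# returns 'EGG-INFO/scripts/runme.py'; 'PURELIB/foo.py' maps to 'foo.py'.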
-
-def parse_requirement_arg(spec):
- try:
- return Requirement.parse(spec)
- except ValueError:
- raise DistutilsError(
- "Not a URL, existing file, or requirement spec: %r" % (spec,)
- )
-
-class PthDistributions(Environment):
- """A .pth file with Distribution paths in it"""
-
- dirty = False
-
- def __init__(self, filename, sitedirs=()):
- self.filename = filename; self.sitedirs=map(normalize_path, sitedirs)
- self.basedir = normalize_path(os.path.dirname(self.filename))
- self._load(); Environment.__init__(self, [], None, None)
- for path in yield_lines(self.paths):
- map(self.add, find_distributions(path, True))
-
- def _load(self):
- self.paths = []
- saw_import = False
- seen = dict.fromkeys(self.sitedirs)
- if os.path.isfile(self.filename):
- f = open(self.filename,'rt')
- for line in f:
- if line.startswith('import'):
- saw_import = True
- continue
- path = line.rstrip()
- self.paths.append(path)
- if not path.strip() or path.strip().startswith('#'):
- continue
- # skip non-existent paths, in case somebody deleted a package
- # manually, and duplicate paths as well
- path = self.paths[-1] = normalize_path(
- os.path.join(self.basedir,path)
- )
- if not os.path.exists(path) or path in seen:
- self.paths.pop() # skip it
- self.dirty = True # we cleaned up, so we're dirty now :)
- continue
- seen[path] = 1
- f.close()
-
- if self.paths and not saw_import:
- self.dirty = True # ensure anything we touch has import wrappers
- while self.paths and not self.paths[-1].strip():
- self.paths.pop()
-
- def save(self):
- """Write changed .pth file back to disk"""
- if not self.dirty:
- return
-
- data = '\n'.join(map(self.make_relative,self.paths))
- if data:
- log.debug("Saving %s", self.filename)
- data = (
- "import sys; sys.__plen = len(sys.path)\n"
- "%s\n"
- "import sys; new=sys.path[sys.__plen:];"
- " del sys.path[sys.__plen:];"
- " p=getattr(sys,'__egginsert',0); sys.path[p:p]=new;"
- " sys.__egginsert = p+len(new)\n"
- ) % data
-
- if os.path.islink(self.filename):
- os.unlink(self.filename)
- f = open(self.filename,'wt')
- f.write(data); f.close()
-
- elif os.path.exists(self.filename):
- log.debug("Deleting empty %s", self.filename)
- os.unlink(self.filename)
-
- self.dirty = False
-
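# For reference, the template in save() above produces a file shaped like this
# (the egg path line is illustrative); the bracketing import lines splice the
# new entries in at sys.__egginsert rather than appending them to sys.path:
SAMPLE_EASY_INSTALL_PTH = (
    "import sys; sys.__plen = len(sys.path)\n"
    "./demo-0.1-py2.7.egg\n"
    "import sys; new=sys.path[sys.__plen:];"
    " del sys.path[sys.__plen:];"
    " p=getattr(sys,'__egginsert',0); sys.path[p:p]=new;"
    " sys.__egginsert = p+len(new)\n"
)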
- def add(self,dist):
- """Add `dist` to the distribution map"""
- if (dist.location not in self.paths and (
- dist.location not in self.sitedirs or
- dist.location == os.getcwd() #account for '.' being in PYTHONPATH
- )):
- self.paths.append(dist.location)
- self.dirty = True
- Environment.add(self,dist)
-
- def remove(self,dist):
- """Remove `dist` from the distribution map"""
- while dist.location in self.paths:
- self.paths.remove(dist.location); self.dirty = True
- Environment.remove(self,dist)
-
-
- def make_relative(self,path):
- npath, last = os.path.split(normalize_path(path))
- baselen = len(self.basedir)
- parts = [last]
- sep = os.altsep=='/' and '/' or os.sep
- while len(npath)>=baselen:
- if npath==self.basedir:
- parts.append(os.curdir)
- parts.reverse()
- return sep.join(parts)
- npath, last = os.path.split(npath)
- parts.append(last)
- else:
- return path
-
-def get_script_header(script_text, executable=sys_executable, wininst=False):
- """Create a #! line, getting options (if any) from script_text"""
- from distutils.command.build_scripts import first_line_re
- first = (script_text+'\n').splitlines()[0]
- match = first_line_re.match(first)
- options = ''
- if match:
- options = match.group(1) or ''
- if options: options = ' '+options
- if wininst:
- executable = "python.exe"
- else:
- executable = nt_quote_arg(executable)
- hdr = "#!%(executable)s%(options)s\n" % locals()
- if not isascii(hdr):
- # Non-ascii path to sys.executable, use -x to prevent warnings
- if options:
- if options.strip().startswith('-'):
- options = ' -x'+options.strip()[1:]
- # else: punt, we can't do it, let the warning happen anyway
- else:
- options = ' -x'
- executable = fix_jython_executable(executable, options)
- hdr = "#!%(executable)s%(options)s\n" % locals()
- return hdr
-
-def auto_chmod(func, arg, exc):
- if func is os.remove and os.name=='nt':
- chmod(arg, stat.S_IWRITE)
- return func(arg)
- exc = sys.exc_info()
- raise exc[0], (exc[1][0], exc[1][1] + (" %s %s" % (func,arg)))
-
-def uncache_zipdir(path):
- """Ensure that the importer caches dont have stale info for `path`"""
- from zipimport import _zip_directory_cache as zdc
- _uncache(path, zdc)
- _uncache(path, sys.path_importer_cache)
-
-def _uncache(path, cache):
- if path in cache:
- del cache[path]
- else:
- path = normalize_path(path)
- for p in cache:
- if normalize_path(p)==path:
- del cache[p]
- return
-
-def is_python(text, filename='<string>'):
- "Is this string a valid Python script?"
- try:
- compile(text, filename, 'exec')
- except (SyntaxError, TypeError):
- return False
- else:
- return True
-
-def is_sh(executable):
- """Determine if the specified executable is a .sh (contains a #! line)"""
- try:
- fp = open(executable)
- magic = fp.read(2)
- fp.close()
- except (OSError,IOError): return executable
- return magic == '#!'
-
-def nt_quote_arg(arg):
- """Quote a command line argument according to Windows parsing rules"""
-
- result = []
- needquote = False
- nb = 0
-
- needquote = (" " in arg) or ("\t" in arg)
- if needquote:
- result.append('"')
-
- for c in arg:
- if c == '\\':
- nb += 1
- elif c == '"':
- # double preceding backslashes, then add a \"
- result.append('\\' * (nb*2) + '\\"')
- nb = 0
- else:
- if nb:
- result.append('\\' * nb)
- nb = 0
- result.append(c)
-
- if nb:
- result.append('\\' * nb)
-
- if needquote:
- result.append('\\' * nb) # double the trailing backslashes
- result.append('"')
-
- return ''.join(result)
-
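# Two quick checks of the quoting rules implemented above (illustrative
# paths, expected values written as Python string literals):
#   nt_quote_arg('C:\\Program Files\\python.exe')
#       -> '"C:\\Program Files\\python.exe"'   # space forces quoting
#   nt_quote_arg('plain.exe')
#       -> 'plain.exe'                         # returned unchanged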
-
-
-
-
-
-
-
-
-def is_python_script(script_text, filename):
- """Is this text, as a whole, a Python script? (as opposed to shell/bat/etc.
- """
- if filename.endswith('.py') or filename.endswith('.pyw'):
- return True # extension says it's Python
- if is_python(script_text, filename):
- return True # it's syntactically valid Python
- if script_text.startswith('#!'):
- # It begins with a '#!' line, so check if 'python' is in it somewhere
- return 'python' in script_text.splitlines()[0].lower()
-
- return False # Not any Python I can recognize
-
-try:
- from os import chmod as _chmod
-except ImportError:
- # Jython compatibility
- def _chmod(*args): pass
-
-def chmod(path, mode):
- log.debug("changing mode of %s to %o", path, mode)
- try:
- _chmod(path, mode)
- except os.error, e:
- log.debug("chmod failed: %s", e)
-
-def fix_jython_executable(executable, options):
- if sys.platform.startswith('java') and is_sh(executable):
- # Workaround Jython's sys.executable being a .sh (an invalid
- # shebang line interpreter)
- if options:
- # Can't apply the workaround, leave it broken
- log.warn("WARNING: Unable to adapt shebang line for Jython,"
- " the following script is NOT executable\n"
- " see http://bugs.jython.org/issue1112 for"
- " more information.")
- else:
- return '/usr/bin/env %s' % executable
- return executable
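-
-# [Editorial sketch.] On Jython, sys.executable may itself be a shell
-# script, which kernels reject as a shebang interpreter; with no extra
-# options the workaround reroutes through env, roughly:
-#
-#   fix_jython_executable('/usr/local/bin/jython', '')
-#   -> '/usr/bin/env /usr/local/bin/jython'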
-
-
-def get_script_args(dist, executable=sys_executable, wininst=False):
- """Yield write_script() argument tuples for a distribution's entrypoints"""
- spec = str(dist.as_requirement())
- header = get_script_header("", executable, wininst)
- for group in 'console_scripts', 'gui_scripts':
- for name, ep in dist.get_entry_map(group).items():
- script_text = (
- "# EASY-INSTALL-ENTRY-SCRIPT: %(spec)r,%(group)r,%(name)r\n"
- "__requires__ = %(spec)r\n"
- "import sys\n"
- "from pkg_resources import load_entry_point\n"
- "\n"
- "if __name__ == '__main__':"
- "\n"
- " sys.exit(\n"
- " load_entry_point(%(spec)r, %(group)r, %(name)r)()\n"
- " )\n"
- ) % locals()
- if sys.platform=='win32' or wininst:
- # On Windows/wininst, add a .py extension and an .exe launcher
- if group=='gui_scripts':
- ext, launcher = '-script.pyw', 'gui.exe'
- old = ['.pyw']
- new_header = re.sub('(?i)python.exe','pythonw.exe',header)
- else:
- ext, launcher = '-script.py', 'cli.exe'
- old = ['.py','.pyc','.pyo']
- new_header = re.sub('(?i)pythonw.exe','python.exe',header)
-
- if os.path.exists(new_header[2:-1]) or sys.platform!='win32':
- hdr = new_header
- else:
- hdr = header
- yield (name+ext, hdr+script_text, 't', [name+x for x in old])
- yield (
- name+'.exe', resource_string('setuptools', launcher),
- 'b' # write in binary mode
- )
- else:
- # On other platforms, we assume the right thing to do is to
- # just write the stub with no extension.
- yield (name, header+script_text)
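-
-# [Editorial sketch -- hypothetical names.] For a distribution frobpkg 1.0
-# declaring the entry point "frob = frobpkg.cli:main" under
-# console_scripts, the stub yielded above renders roughly as:
-#
-#   #!/usr/bin/python
-#   # EASY-INSTALL-ENTRY-SCRIPT: 'frobpkg==1.0','console_scripts','frob'
-#   __requires__ = 'frobpkg==1.0'
-#   import sys
-#   from pkg_resources import load_entry_point
-#
-#   if __name__ == '__main__':
-#       sys.exit(
-#           load_entry_point('frobpkg==1.0', 'console_scripts', 'frob')()
-#       )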
-
-def rmtree(path, ignore_errors=False, onerror=auto_chmod):
- """Recursively delete a directory tree.
-
- This code is taken from the Python 2.4 version of 'shutil', because
- the 2.3 version doesn't really work right.
- """
- if ignore_errors:
- def onerror(*args):
- pass
- elif onerror is None:
- def onerror(*args):
- raise
- names = []
- try:
- names = os.listdir(path)
- except os.error, err:
- onerror(os.listdir, path, sys.exc_info())
- for name in names:
- fullname = os.path.join(path, name)
- try:
- mode = os.lstat(fullname).st_mode
- except os.error:
- mode = 0
- if stat.S_ISDIR(mode):
- rmtree(fullname, ignore_errors, onerror)
- else:
- try:
- os.remove(fullname)
- except os.error, err:
- onerror(os.remove, fullname, sys.exc_info())
- try:
- os.rmdir(path)
- except os.error:
- onerror(os.rmdir, path, sys.exc_info())
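-
-# [Editorial note -- not in the original.] rmtree() pairs with auto_chmod()
-# above: on Windows, os.remove() fails on read-only files, so the default
-# error handler clears the read-only bit and retries once:
-#
-#   rmtree('temp.egg')                 # retries read-only deletes on NT
-#   rmtree(path, ignore_errors=True)   # or swallow errors entirely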
-
-def bootstrap():
- # This function is called when setuptools*.egg is run using /bin/sh
- import setuptools; argv0 = os.path.dirname(setuptools.__path__[0])
- sys.argv[0] = argv0; sys.argv.append(argv0); main()
-
-def main(argv=None, **kw):
- from setuptools import setup
- from setuptools.dist import Distribution
- import distutils.core
-
- USAGE = """\
-usage: %(script)s [options] requirement_or_url ...
- or: %(script)s --help
-"""
-
- def gen_usage (script_name):
- script = os.path.basename(script_name)
- return USAGE % vars()
-
- def with_ei_usage(f):
- old_gen_usage = distutils.core.gen_usage
- try:
- distutils.core.gen_usage = gen_usage
- return f()
- finally:
- distutils.core.gen_usage = old_gen_usage
-
- class DistributionWithoutHelpCommands(Distribution):
- common_usage = ""
-
- def _show_help(self,*args,**kw):
- with_ei_usage(lambda: Distribution._show_help(self,*args,**kw))
-
- def find_config_files(self):
- files = Distribution.find_config_files(self)
- if 'setup.cfg' in files:
- files.remove('setup.cfg')
- return files
-
- if argv is None:
- argv = sys.argv[1:]
-
- with_ei_usage(lambda:
- setup(
- script_args = ['-q','easy_install', '-v']+argv,
- script_name = sys.argv[0] or 'easy_install',
- distclass=DistributionWithoutHelpCommands, **kw
- )
- )
-
-
-
-
diff --git a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/egg_info.py b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/egg_info.py
deleted file mode 100755
index 46cdf4e0..00000000
--- a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/egg_info.py
+++ /dev/null
@@ -1,457 +0,0 @@
-"""setuptools.command.egg_info
-
-Create a distribution's .egg-info directory and contents"""
-
-# This module should be kept compatible with Python 2.3
-import os, re, sys
-from setuptools import Command
-from distutils.errors import *
-from distutils import log
-from setuptools.command.sdist import sdist
-from distutils.util import convert_path
-from distutils.filelist import FileList
-from pkg_resources import parse_requirements, safe_name, parse_version, \
- safe_version, yield_lines, EntryPoint, iter_entry_points, to_filename
-from sdist import walk_revctrl
-
-class egg_info(Command):
- description = "create a distribution's .egg-info directory"
-
- user_options = [
- ('egg-base=', 'e', "directory containing .egg-info directories"
- " (default: top of the source tree)"),
- ('tag-svn-revision', 'r',
- "Add subversion revision ID to version number"),
- ('tag-date', 'd', "Add date stamp (e.g. 20050528) to version number"),
- ('tag-build=', 'b', "Specify explicit tag to add to version number"),
- ('no-svn-revision', 'R',
- "Don't add subversion revision ID [default]"),
- ('no-date', 'D', "Don't include date stamp [default]"),
- ]
-
- boolean_options = ['tag-date', 'tag-svn-revision']
- negative_opt = {'no-svn-revision': 'tag-svn-revision',
- 'no-date': 'tag-date'}
-
-
-
-
-
-
-
- def initialize_options(self):
- self.egg_name = None
- self.egg_version = None
- self.egg_base = None
- self.egg_info = None
- self.tag_build = None
- self.tag_svn_revision = 0
- self.tag_date = 0
- self.broken_egg_info = False
- self.vtags = None
-
- def save_version_info(self, filename):
- from setopt import edit_config
- edit_config(
- filename,
- {'egg_info':
- {'tag_svn_revision':0, 'tag_date': 0, 'tag_build': self.tags()}
- }
- )
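-
-# [Editorial sketch.] sdist's make_release_tree() (later in this diff)
-# calls save_version_info() to freeze the current tags into the release
-# tree's setup.cfg; with tag_build == 'dev' the written fragment looks
-# like:
-#
-#   [egg_info]
-#   tag_build = dev
-#   tag_svn_revision = 0
-#   tag_date = 0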
-
-
- def finalize_options (self):
- self.egg_name = safe_name(self.distribution.get_name())
- self.vtags = self.tags()
- self.egg_version = self.tagged_version()
-
- try:
- list(
- parse_requirements('%s==%s' % (self.egg_name,self.egg_version))
- )
- except ValueError:
- raise DistutilsOptionError(
- "Invalid distribution name or version syntax: %s-%s" %
- (self.egg_name,self.egg_version)
- )
-
- if self.egg_base is None:
- dirs = self.distribution.package_dir
- self.egg_base = (dirs or {}).get('',os.curdir)
-
- self.ensure_dirname('egg_base')
- self.egg_info = to_filename(self.egg_name)+'.egg-info'
- if self.egg_base != os.curdir:
- self.egg_info = os.path.join(self.egg_base, self.egg_info)
- if '-' in self.egg_name: self.check_broken_egg_info()
-
- # Set package version for the benefit of dumber commands
- # (e.g. sdist, bdist_wininst, etc.)
- #
- self.distribution.metadata.version = self.egg_version
-
- # If we bootstrapped around the lack of a PKG-INFO, as might be the
- # case in a fresh checkout, make sure that any special tags get added
- # to the version info
- #
- pd = self.distribution._patched_dist
- if pd is not None and pd.key==self.egg_name.lower():
- pd._version = self.egg_version
- pd._parsed_version = parse_version(self.egg_version)
- self.distribution._patched_dist = None
-
-
- def write_or_delete_file(self, what, filename, data, force=False):
- """Write `data` to `filename` or delete if empty
-
- If `data` is non-empty, this routine is the same as ``write_file()``.
- If `data` is empty but not ``None``, this is the same as calling
- ``delete_file(filename)``. If `data` is ``None``, then this is a no-op
- unless `filename` exists, in which case a warning is issued about the
- orphaned file (if `force` is false), or deleted (if `force` is true).
- """
- if data:
- self.write_file(what, filename, data)
- elif os.path.exists(filename):
- if data is None and not force:
- log.warn(
- "%s not set in setup(), but %s exists", what, filename
- )
- return
- else:
- self.delete_file(filename)
-
- def write_file(self, what, filename, data):
- """Write `data` to `filename` (if not a dry run) after announcing it
-
- `what` is used in a log message to identify what is being written
- to the file.
- """
- log.info("writing %s to %s", what, filename)
- if sys.version_info >= (3,):
- data = data.encode("utf-8")
- if not self.dry_run:
- f = open(filename, 'wb')
- f.write(data)
- f.close()
-
- def delete_file(self, filename):
- """Delete `filename` (if not a dry run) after announcing it"""
- log.info("deleting %s", filename)
- if not self.dry_run:
- os.unlink(filename)
-
- def tagged_version(self):
- return safe_version(self.distribution.get_version() + self.vtags)
-
- def run(self):
- self.mkpath(self.egg_info)
- installer = self.distribution.fetch_build_egg
- for ep in iter_entry_points('egg_info.writers'):
- writer = ep.load(installer=installer)
- writer(self, ep.name, os.path.join(self.egg_info,ep.name))
-
- # Get rid of native_libs.txt if it was put there by older bdist_egg
- nl = os.path.join(self.egg_info, "native_libs.txt")
- if os.path.exists(nl):
- self.delete_file(nl)
-
- self.find_sources()
-
- def tags(self):
- version = ''
- if self.tag_build:
- version+=self.tag_build
- if self.tag_svn_revision and (
- os.path.exists('.svn') or os.path.exists('PKG-INFO')
- ): version += '-r%s' % self.get_svn_revision()
- if self.tag_date:
- import time; version += time.strftime("-%Y%m%d")
- return version
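-
-# [Editorial example -- assumed values.] With tag_build == '.dev' and
-# tag_date set, a base version of '1.0' becomes:
-#
-#   tags()           -> '.dev-20111204'        (date is illustrative)
-#   tagged_version() -> safe_version('1.0' + '.dev-20111204')
-#                    == '1.0.dev-20111204'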
-
-
- def get_svn_revision(self):
- revision = 0
- urlre = re.compile('url="([^"]+)"')
- revre = re.compile('committed-rev="(\d+)"')
-
- for base,dirs,files in os.walk(os.curdir):
- if '.svn' not in dirs:
- dirs[:] = []
- continue # no sense walking uncontrolled subdirs
- dirs.remove('.svn')
- f = open(os.path.join(base,'.svn','entries'))
- data = f.read()
- f.close()
-
- if data.startswith('10') or data.startswith('9') or data.startswith('8'):
- data = map(str.splitlines,data.split('\n\x0c\n'))
- del data[0][0] # get rid of the '8' or '9' or '10'
- dirurl = data[0][3]
- localrev = max([int(d[9]) for d in data if len(d)>9 and d[9]]+[0])
- elif data.startswith('<?xml'):
- dirurl = urlre.search(data).group(1) # get repository URL
- localrev = max([int(m.group(1)) for m in revre.finditer(data)]+[0])
- else:
- log.warn("unrecognized .svn/entries format; skipping %s", base)
- dirs[:] = []
- continue
- if base==os.curdir:
- base_url = dirurl+'/' # save the root url
- elif not dirurl.startswith(base_url):
- dirs[:] = []
- continue # not part of the same svn tree, skip it
- revision = max(revision, localrev)
-
- return str(revision or get_pkg_info_revision())
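-
-# [Editorial note.] Two on-disk formats are handled above: svn <= 1.3
-# kept .svn/entries as XML (the urlre/revre branch), while svn 1.4-1.6
-# use a plain-text file whose records are separated by '\n\x0c\n' and
-# whose first field is the format number ('8', '9' or '10'):
-#
-#   data = open('.svn/entries').read()
-#   records = map(str.splitlines, data.split('\n\x0c\n'))
-#   # records[0][3] holds the directory URL once the format line is
-#   # dropped; field 9 of each entry is its committed revision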
-
-
-
-
-
-
-
- def find_sources(self):
- """Generate SOURCES.txt manifest file"""
- manifest_filename = os.path.join(self.egg_info,"SOURCES.txt")
- mm = manifest_maker(self.distribution)
- mm.manifest = manifest_filename
- mm.run()
- self.filelist = mm.filelist
-
- def check_broken_egg_info(self):
- bei = self.egg_name+'.egg-info'
- if self.egg_base != os.curdir:
- bei = os.path.join(self.egg_base, bei)
- if os.path.exists(bei):
- log.warn(
- "-"*78+'\n'
- "Note: Your current .egg-info directory has a '-' in its name;"
- '\nthis will not work correctly with "setup.py develop".\n\n'
- 'Please rename %s to %s to correct this problem.\n'+'-'*78,
- bei, self.egg_info
- )
- self.broken_egg_info = self.egg_info
- self.egg_info = bei # make it work for now
-
-class FileList(FileList): # intentionally shadows the distutils FileList imported above
- """File list that accepts only existing, platform-independent paths"""
-
- def append(self, item):
- if item.endswith('\r'): # Fix older sdists built on Windows
- item = item[:-1]
- path = convert_path(item)
- if os.path.exists(path):
- self.files.append(path)
-
-
-
-
-
-
-
-
-
-class manifest_maker(sdist):
-
- template = "MANIFEST.in"
-
- def initialize_options (self):
- self.use_defaults = 1
- self.prune = 1
- self.manifest_only = 1
- self.force_manifest = 1
-
- def finalize_options(self):
- pass
-
- def run(self):
- self.filelist = FileList()
- if not os.path.exists(self.manifest):
- self.write_manifest() # it must exist so it'll get in the list
- self.filelist.findall()
- self.add_defaults()
- if os.path.exists(self.template):
- self.read_template()
- self.prune_file_list()
- self.filelist.sort()
- self.filelist.remove_duplicates()
- self.write_manifest()
-
- def write_manifest (self):
- """Write the file list in 'self.filelist' (presumably as filled in
- by 'add_defaults()' and 'read_template()') to the manifest file
- named by 'self.manifest'.
- """
- files = self.filelist.files
- if os.sep!='/':
- files = [f.replace(os.sep,'/') for f in files]
- self.execute(write_file, (self.manifest, files),
- "writing manifest file '%s'" % self.manifest)
-
- def warn(self, msg): # suppress missing-file warnings from sdist
- if not msg.startswith("standard file not found:"):
- sdist.warn(self, msg)
-
- def add_defaults(self):
- sdist.add_defaults(self)
- self.filelist.append(self.template)
- self.filelist.append(self.manifest)
- rcfiles = list(walk_revctrl())
- if rcfiles:
- self.filelist.extend(rcfiles)
- elif os.path.exists(self.manifest):
- self.read_manifest()
- ei_cmd = self.get_finalized_command('egg_info')
- self.filelist.include_pattern("*", prefix=ei_cmd.egg_info)
-
- def prune_file_list (self):
- build = self.get_finalized_command('build')
- base_dir = self.distribution.get_fullname()
- self.filelist.exclude_pattern(None, prefix=build.build_base)
- self.filelist.exclude_pattern(None, prefix=base_dir)
- sep = re.escape(os.sep)
- self.filelist.exclude_pattern(sep+r'(RCS|CVS|\.svn)'+sep, is_regex=1)
-
-
-def write_file (filename, contents):
- """Create a file with the specified name and write 'contents' (a
- sequence of strings without line terminators) to it.
- """
- contents = "\n".join(contents)
- if sys.version_info >= (3,):
- contents = contents.encode("utf-8")
- f = open(filename, "wb") # always write POSIX-style manifest
- f.write(contents)
- f.close()
-
-
-
-
-
-
-
-
-
-
-
-
-
-def write_pkg_info(cmd, basename, filename):
- log.info("writing %s", filename)
- if not cmd.dry_run:
- metadata = cmd.distribution.metadata
- metadata.version, oldver = cmd.egg_version, metadata.version
- metadata.name, oldname = cmd.egg_name, metadata.name
- try:
- # write unescaped data to PKG-INFO, so older pkg_resources
- # can still parse it
- metadata.write_pkg_info(cmd.egg_info)
- finally:
- metadata.name, metadata.version = oldname, oldver
-
- safe = getattr(cmd.distribution,'zip_safe',None)
- import bdist_egg; bdist_egg.write_safety_flag(cmd.egg_info, safe)
-
-def warn_depends_obsolete(cmd, basename, filename):
- if os.path.exists(filename):
- log.warn(
- "WARNING: 'depends.txt' is not used by setuptools 0.6!\n"
- "Use the install_requires/extras_require setup() args instead."
- )
-
-
-def write_requirements(cmd, basename, filename):
- dist = cmd.distribution
- data = ['\n'.join(yield_lines(dist.install_requires or ()))]
- for extra,reqs in (dist.extras_require or {}).items():
- data.append('\n\n[%s]\n%s' % (extra, '\n'.join(yield_lines(reqs))))
- cmd.write_or_delete_file("requirements", filename, ''.join(data))
-
-def write_toplevel_names(cmd, basename, filename):
- pkgs = dict.fromkeys(
- [k.split('.',1)[0]
- for k in cmd.distribution.iter_distribution_names()
- ]
- )
- cmd.write_file("top-level names", filename, '\n'.join(pkgs)+'\n')
-
-
-
-def overwrite_arg(cmd, basename, filename):
- write_arg(cmd, basename, filename, True)
-
-def write_arg(cmd, basename, filename, force=False):
- argname = os.path.splitext(basename)[0]
- value = getattr(cmd.distribution, argname, None)
- if value is not None:
- value = '\n'.join(value)+'\n'
- cmd.write_or_delete_file(argname, filename, value, force)
-
-def write_entries(cmd, basename, filename):
- ep = cmd.distribution.entry_points
-
- if isinstance(ep,basestring) or ep is None:
- data = ep
- else: # a dict of entry point sections
- data = []
- for section, contents in ep.items():
- if not isinstance(contents,basestring):
- contents = EntryPoint.parse_group(section, contents)
- contents = '\n'.join(map(str,contents.values()))
- data.append('[%s]\n%s\n\n' % (section,contents))
- data = ''.join(data)
-
- cmd.write_or_delete_file('entry points', filename, data, True)
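-
-# [Editorial example.] For entry_points = {'console_scripts':
-# ['frob = frobpkg.cli:main']}, the parsed and re-serialized
-# entry_points.txt section is:
-#
-#   [console_scripts]
-#   frob = frobpkg.cli:main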
-
-def get_pkg_info_revision():
- # See if we can get a -r### off of PKG-INFO, in case this is an sdist of
- # a subversion revision
- #
- if os.path.exists('PKG-INFO'):
- f = open('PKG-INFO','rU')
- for line in f:
- match = re.match(r"Version:.*-r(\d+)\s*$", line)
- if match:
- return int(match.group(1))
- f.close()
- return 0
-
-
-
-#
diff --git a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/install.py b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/install.py
deleted file mode 100755
index 247c4f25..00000000
--- a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/install.py
+++ /dev/null
@@ -1,124 +0,0 @@
-import setuptools, sys, glob
-from distutils.command.install import install as _install
-from distutils.errors import DistutilsArgError
-
-class install(_install):
- """Use easy_install to install the package, w/dependencies"""
-
- user_options = _install.user_options + [
- ('old-and-unmanageable', None, "Try not to use this!"),
- ('single-version-externally-managed', None,
- "used by system package builders to create 'flat' eggs"),
- ]
- boolean_options = _install.boolean_options + [
- 'old-and-unmanageable', 'single-version-externally-managed',
- ]
- new_commands = [
- ('install_egg_info', lambda self: True),
- ('install_scripts', lambda self: True),
- ]
- _nc = dict(new_commands)
-
- def initialize_options(self):
- _install.initialize_options(self)
- self.old_and_unmanageable = None
- self.single_version_externally_managed = None
- self.no_compile = None # make DISTUTILS_DEBUG work right!
-
- def finalize_options(self):
- _install.finalize_options(self)
- if self.root:
- self.single_version_externally_managed = True
- elif self.single_version_externally_managed:
- if not self.root and not self.record:
- raise DistutilsArgError(
- "You must specify --record or --root when building system"
- " packages"
- )
-
- def handle_extra_path(self):
- if self.root or self.single_version_externally_managed:
- # explicit backward-compatibility mode, allow extra_path to work
- return _install.handle_extra_path(self)
-
- # Ignore extra_path when installing an egg (or being run by another
- # command without --root or --single-version-externally-managed
- self.path_file = None
- self.extra_dirs = ''
-
-
- def run(self):
- # Explicit request for old-style install? Just do it
- if self.old_and_unmanageable or self.single_version_externally_managed:
- return _install.run(self)
-
- # Attempt to detect whether we were called from setup() or by another
- # command. If we were called by setup(), our caller will be the
- # 'run_command' method in 'distutils.dist', and *its* caller will be
- # the 'run_commands' method. If we were called any other way, our
- # immediate caller *might* be 'run_command', but it won't have been
- # called by 'run_commands'. This is slightly kludgy, but seems to
- # work.
- #
- caller = sys._getframe(2)
- caller_module = caller.f_globals.get('__name__','')
- caller_name = caller.f_code.co_name
-
- if caller_module != 'distutils.dist' or caller_name!='run_commands':
- # We weren't called from the command line or setup(), so we
- # should run in backward-compatibility mode to support bdist_*
- # commands.
- _install.run(self)
- else:
- self.do_egg_install()
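-
-# [Editorial sketch.] Concretely, the frame inspection above reads:
-#
-#   caller = sys._getframe(2)
-#   caller.f_globals.get('__name__')  # 'distutils.dist' when run
-#   caller.f_code.co_name             # via setup() -> 'run_commands'
-#
-# Any other caller (e.g. a bdist_* command invoking install directly)
-# gets the old-style distutils install for compatibility.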
-
-
-
-
-
-
- def do_egg_install(self):
-
- easy_install = self.distribution.get_command_class('easy_install')
-
- cmd = easy_install(
- self.distribution, args="x", root=self.root, record=self.record,
- )
- cmd.ensure_finalized() # finalize before bdist_egg munges install cmd
- cmd.always_copy_from = '.' # make sure local-dir eggs get installed
-
- # pick up setup-dir .egg files only: no .egg-info
- cmd.package_index.scan(glob.glob('*.egg'))
-
- self.run_command('bdist_egg')
- args = [self.distribution.get_command_obj('bdist_egg').egg_output]
-
- if setuptools.bootstrap_install_from:
- # Bootstrap self-installation of setuptools
- args.insert(0, setuptools.bootstrap_install_from)
-
- cmd.args = args
- cmd.run()
- setuptools.bootstrap_install_from = None
-
-# XXX Python 3.1 doesn't see _nc if this is inside the class
-install.sub_commands = [
- cmd for cmd in _install.sub_commands if cmd[0] not in install._nc
- ] + install.new_commands
-
-
-#
diff --git a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/install_egg_info.py b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/install_egg_info.py
deleted file mode 100755
index dd95552e..00000000
--- a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/install_egg_info.py
+++ /dev/null
@@ -1,123 +0,0 @@
-from setuptools import Command
-from setuptools.archive_util import unpack_archive
-from distutils import log, dir_util
-import os, shutil, pkg_resources
-
-class install_egg_info(Command):
- """Install an .egg-info directory for the package"""
-
- description = "Install an .egg-info directory for the package"
-
- user_options = [
- ('install-dir=', 'd', "directory to install to"),
- ]
-
- def initialize_options(self):
- self.install_dir = None
-
- def finalize_options(self):
- self.set_undefined_options('install_lib',('install_dir','install_dir'))
- ei_cmd = self.get_finalized_command("egg_info")
- basename = pkg_resources.Distribution(
- None, None, ei_cmd.egg_name, ei_cmd.egg_version
- ).egg_name()+'.egg-info'
- self.source = ei_cmd.egg_info
- self.target = os.path.join(self.install_dir, basename)
- self.outputs = [self.target]
-
- def run(self):
- self.run_command('egg_info')
- target = self.target
- if os.path.isdir(self.target) and not os.path.islink(self.target):
- dir_util.remove_tree(self.target, dry_run=self.dry_run)
- elif os.path.exists(self.target):
- self.execute(os.unlink,(self.target,),"Removing "+self.target)
- if not self.dry_run:
- pkg_resources.ensure_directory(self.target)
- self.execute(self.copytree, (),
- "Copying %s to %s" % (self.source, self.target)
- )
- self.install_namespaces()
-
- def get_outputs(self):
- return self.outputs
-
- def copytree(self):
- # Copy the .egg-info tree to site-packages
- def skimmer(src,dst):
- # filter out source-control directories; note that 'src' is always
- # a '/'-separated path, regardless of platform. 'dst' is a
- # platform-specific path.
- for skip in '.svn/','CVS/':
- if src.startswith(skip) or '/'+skip in src:
- return None
- self.outputs.append(dst)
- log.debug("Copying %s to %s", src, dst)
- return dst
- unpack_archive(self.source, self.target, skimmer)
-
-
-
- def install_namespaces(self):
- nsp = self._get_all_ns_packages()
- if not nsp: return
- filename,ext = os.path.splitext(self.target)
- filename += '-nspkg.pth'; self.outputs.append(filename)
- log.info("Installing %s",filename)
- if not self.dry_run:
- f = open(filename,'wt')
- for pkg in nsp:
- pth = tuple(pkg.split('.'))
- trailer = '\n'
- if '.' in pkg:
- trailer = (
- "; m and setattr(sys.modules[%r], %r, m)\n"
- % ('.'.join(pth[:-1]), pth[-1])
- )
- f.write(
- "import sys,types,os; "
- "p = os.path.join(sys._getframe(1).f_locals['sitedir'], "
- "*%(pth)r); "
- "ie = os.path.exists(os.path.join(p,'__init__.py')); "
- "m = not ie and "
- "sys.modules.setdefault(%(pkg)r,types.ModuleType(%(pkg)r)); "
- "mp = (m or []) and m.__dict__.setdefault('__path__',[]); "
- "(p not in mp) and mp.append(p)%(trailer)s"
- % locals()
- )
- f.close()
-
- def _get_all_ns_packages(self):
- nsp = {}
- for pkg in self.distribution.namespace_packages or []:
- pkg = pkg.split('.')
- while pkg:
- nsp['.'.join(pkg)] = 1
- pkg.pop()
- nsp=list(nsp)
- nsp.sort() # set up shorter names first
- return nsp
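-
-# [Editorial example -- hypothetical package.] For namespace_packages =
-# ['zope.interface']:
-#
-#   _get_all_ns_packages() -> ['zope', 'zope.interface']
-#
-# and install_namespaces() writes one line per entry into the
-# <egg>-nspkg.pth stub, which at startup creates the parent module if no
-# real __init__.py exists and appends the site dir to its __path__.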
-
-
diff --git a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/install_lib.py b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/install_lib.py
deleted file mode 100755
index 82afa142..00000000
--- a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/install_lib.py
+++ /dev/null
@@ -1,82 +0,0 @@
-from distutils.command.install_lib import install_lib as _install_lib
-import os
-
-class install_lib(_install_lib):
- """Don't add compiled flags to filenames of non-Python files"""
-
- def _bytecode_filenames (self, py_filenames):
- bytecode_files = []
- for py_file in py_filenames:
- if not py_file.endswith('.py'):
- continue
- if self.compile:
- bytecode_files.append(py_file + "c")
- if self.optimize > 0:
- bytecode_files.append(py_file + "o")
-
- return bytecode_files
-
- def run(self):
- self.build()
- outfiles = self.install()
- if outfiles is not None:
- # always compile, in case we have any extension stubs to deal with
- self.byte_compile(outfiles)
-
- def get_exclusions(self):
- exclude = {}
- nsp = self.distribution.namespace_packages
-
- if (nsp and self.get_finalized_command('install')
- .single_version_externally_managed
- ):
- for pkg in nsp:
- parts = pkg.split('.')
- while parts:
- pkgdir = os.path.join(self.install_dir, *parts)
- for f in '__init__.py', '__init__.pyc', '__init__.pyo':
- exclude[os.path.join(pkgdir,f)] = 1
- parts.pop()
- return exclude
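-
-# [Editorial example.] For namespace_packages == ['foo.bar'] under a
-# --single-version-externally-managed install, get_exclusions() maps
-# each of these (relative to install_dir) to 1, so copy_tree() below
-# skips them:
-#
-#   foo/__init__.py      foo/__init__.pyc      foo/__init__.pyo
-#   foo/bar/__init__.py  foo/bar/__init__.pyc  foo/bar/__init__.pyo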
-
- def copy_tree(
- self, infile, outfile,
- preserve_mode=1, preserve_times=1, preserve_symlinks=0, level=1
- ):
- assert preserve_mode and preserve_times and not preserve_symlinks
- exclude = self.get_exclusions()
-
- if not exclude:
- return _install_lib.copy_tree(self, infile, outfile)
-
- # Exclude namespace package __init__.py* files from the output
-
- from setuptools.archive_util import unpack_directory
- from distutils import log
-
- outfiles = []
-
- def pf(src, dst):
- if dst in exclude:
- log.warn("Skipping installation of %s (namespace package)",dst)
- return False
-
- log.info("copying %s -> %s", src, os.path.dirname(dst))
- outfiles.append(dst)
- return dst
-
- unpack_directory(infile, outfile, pf)
- return outfiles
-
- def get_outputs(self):
- outputs = _install_lib.get_outputs(self)
- exclude = self.get_exclusions()
- if exclude:
- return [f for f in outputs if f not in exclude]
- return outputs
-
-
-
-
-
-
diff --git a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/install_scripts.py b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/install_scripts.py
deleted file mode 100755
index 6ce1b993..00000000
--- a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/install_scripts.py
+++ /dev/null
@@ -1,53 +0,0 @@
-from distutils.command.install_scripts import install_scripts \
- as _install_scripts
-from pkg_resources import Distribution, PathMetadata, ensure_directory
-import os
-from distutils import log
-
-class install_scripts(_install_scripts):
- """Do normal script install, plus any egg_info wrapper scripts"""
-
- def initialize_options(self):
- _install_scripts.initialize_options(self)
- self.no_ep = False
-
- def run(self):
- from setuptools.command.easy_install import get_script_args
- from setuptools.command.easy_install import sys_executable
-
- self.run_command("egg_info")
- if self.distribution.scripts:
- _install_scripts.run(self) # run first to set up self.outfiles
- else:
- self.outfiles = []
- if self.no_ep:
- # don't install entry point scripts into .egg file!
- return
-
- ei_cmd = self.get_finalized_command("egg_info")
- dist = Distribution(
- ei_cmd.egg_base, PathMetadata(ei_cmd.egg_base, ei_cmd.egg_info),
- ei_cmd.egg_name, ei_cmd.egg_version,
- )
- bs_cmd = self.get_finalized_command('build_scripts')
- executable = getattr(bs_cmd,'executable',sys_executable)
- is_wininst = getattr(
- self.get_finalized_command("bdist_wininst"), '_is_running', False
- )
- for args in get_script_args(dist, executable, is_wininst):
- self.write_script(*args)
-
- def write_script(self, script_name, contents, mode="t", *ignored):
- """Write an executable file to the scripts directory"""
- from setuptools.command.easy_install import chmod
- log.info("Installing %s script to %s", script_name, self.install_dir)
- target = os.path.join(self.install_dir, script_name)
- self.outfiles.append(target)
-
- if not self.dry_run:
- ensure_directory(target)
- f = open(target,"w"+mode)
- f.write(contents)
- f.close()
- chmod(target,0755)
-
diff --git a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/register.py b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/register.py
deleted file mode 100755
index 3b2e0859..00000000
--- a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/register.py
+++ /dev/null
@@ -1,10 +0,0 @@
-from distutils.command.register import register as _register
-
-class register(_register):
- __doc__ = _register.__doc__
-
- def run(self):
- # Make sure that we are using valid current name/version info
- self.run_command('egg_info')
- _register.run(self)
-
diff --git a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/rotate.py b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/rotate.py
deleted file mode 100755
index 11b6eae8..00000000
--- a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/rotate.py
+++ /dev/null
@@ -1,82 +0,0 @@
-import distutils, os
-from setuptools import Command
-from distutils.util import convert_path
-from distutils import log
-from distutils.errors import *
-
-class rotate(Command):
- """Delete older distributions"""
-
- description = "delete older distributions, keeping N newest files"
- user_options = [
- ('match=', 'm', "patterns to match (required)"),
- ('dist-dir=', 'd', "directory where the distributions are"),
- ('keep=', 'k', "number of matching distributions to keep"),
- ]
-
- boolean_options = []
-
- def initialize_options(self):
- self.match = None
- self.dist_dir = None
- self.keep = None
-
- def finalize_options(self):
- if self.match is None:
- raise DistutilsOptionError(
- "Must specify one or more (comma-separated) match patterns "
- "(e.g. '.zip' or '.egg')"
- )
- if self.keep is None:
- raise DistutilsOptionError("Must specify number of files to keep")
- try:
- self.keep = int(self.keep)
- except ValueError:
- raise DistutilsOptionError("--keep must be an integer")
- if isinstance(self.match, basestring):
- self.match = [
- convert_path(p.strip()) for p in self.match.split(',')
- ]
- self.set_undefined_options('bdist',('dist_dir', 'dist_dir'))
-
- def run(self):
- self.run_command("egg_info")
- from glob import glob
- for pattern in self.match:
- pattern = self.distribution.get_name()+'*'+pattern
- files = glob(os.path.join(self.dist_dir,pattern))
- files = [(os.path.getmtime(f),f) for f in files]
- files.sort()
- files.reverse()
-
- log.info("%d file(s) matching %s", len(files), pattern)
- files = files[self.keep:]
- for (t,f) in files:
- log.info("Deleting %s", f)
- if not self.dry_run:
- os.unlink(f)
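-
-# [Editorial usage sketch.] Keep only the three newest .egg and .zip
-# files for this project in dist/:
-#
-#   python setup.py rotate --match=.egg,.zip --keep=3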
-
-
diff --git a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/saveopts.py b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/saveopts.py
deleted file mode 100755
index 1180a440..00000000
--- a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/saveopts.py
+++ /dev/null
@@ -1,25 +0,0 @@
-import distutils, os
-from setuptools import Command
-from setuptools.command.setopt import edit_config, option_base
-
-class saveopts(option_base):
- """Save command-line options to a file"""
-
- description = "save supplied options to setup.cfg or other config file"
-
- def run(self):
- dist = self.distribution
- commands = dist.command_options.keys()
- settings = {}
-
- for cmd in commands:
-
- if cmd=='saveopts':
- continue # don't save our own options!
-
- for opt,(src,val) in dist.get_option_dict(cmd).items():
- if src=="command line":
- settings.setdefault(cmd,{})[opt] = val
-
- edit_config(self.filename, settings, self.dry_run)
-
diff --git a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/sdist.py b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/sdist.py
deleted file mode 100755
index 3442fe4b..00000000
--- a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/sdist.py
+++ /dev/null
@@ -1,252 +0,0 @@
-from distutils.command.sdist import sdist as _sdist
-from distutils.util import convert_path
-from distutils import log
-import os, re, sys, pkg_resources
-from glob import glob
-
-entities = [
- ("&lt;","<"), ("&gt;", ">"), ("&quot;", '"'), ("&apos;", "'"),
- ("&amp;", "&")
-]
-
-def unescape(data):
- for old,new in entities:
- data = data.replace(old,new)
- return data
-
-def re_finder(pattern, postproc=None):
- def find(dirname, filename):
- f = open(filename,'rU')
- data = f.read()
- f.close()
- for match in pattern.finditer(data):
- path = match.group(1)
- if postproc:
- path = postproc(path)
- yield joinpath(dirname,path)
- return find
-
-def joinpath(prefix,suffix):
- if not prefix:
- return suffix
- return os.path.join(prefix,suffix)
-
-
-
-
-
-
-
-
-
-
-def walk_revctrl(dirname=''):
- """Find all files under revision control"""
- for ep in pkg_resources.iter_entry_points('setuptools.file_finders'):
- for item in ep.load()(dirname):
- yield item
-
-def _default_revctrl(dirname=''):
- for path, finder in finders:
- path = joinpath(dirname,path)
- if os.path.isfile(path):
- for path in finder(dirname,path):
- if os.path.isfile(path):
- yield path
- elif os.path.isdir(path):
- for item in _default_revctrl(path):
- yield item
-
-def externals_finder(dirname, filename):
- """Find any 'svn:externals' directories"""
- found = False
- f = open(filename,'rt')
- for line in iter(f.readline, ''): # can't use direct iter!
- parts = line.split()
- if len(parts)==2:
- kind,length = parts
- data = f.read(int(length))
- if kind=='K' and data=='svn:externals':
- found = True
- elif kind=='V' and found:
- f.close()
- break
- else:
- f.close()
- return
-
- for line in data.splitlines():
- parts = line.split()
- if parts:
- yield joinpath(dirname, parts[0])
-
-
-entries_pattern = re.compile(r'name="([^"]+)"(?![^>]+deleted="true")', re.I)
-
-def entries_finder(dirname, filename):
- f = open(filename,'rU')
- data = f.read()
- f.close()
- if data.startswith('10') or data.startswith('9') or data.startswith('8'):
- for record in map(str.splitlines, data.split('\n\x0c\n')[1:]):
- # subversion 1.6/1.5/1.4
- if not record or len(record)>=6 and record[5]=="delete":
- continue # skip deleted
- yield joinpath(dirname, record[0])
- elif data.startswith('<?xml'):
- for match in entries_pattern.finditer(data):
- yield joinpath(dirname,unescape(match.group(1)))
- else:
- log.warn("unrecognized .svn/entries format in %s", dirname)
-
-
-finders = [
- (convert_path('CVS/Entries'),
- re_finder(re.compile(r"^\w?/([^/]+)/", re.M))),
- (convert_path('.svn/entries'), entries_finder),
- (convert_path('.svn/dir-props'), externals_finder),
- (convert_path('.svn/dir-prop-base'), externals_finder), # svn 1.4
-]
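-
-# [Editorial example -- hypothetical file.] A CVS/Entries line such as
-#
-#   /setup.py/1.5/Mon Jan 1 00:00:00 2011//
-#
-# is matched by the regex above, so re_finder yields 'setup.py' joined
-# onto the directory being scanned; the .svn finders work the same way
-# through entries_finder and externals_finder.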
-
-
-class sdist(_sdist):
- """Smart sdist that finds anything supported by revision control"""
-
- user_options = [
- ('formats=', None,
- "formats for source distribution (comma-separated list)"),
- ('keep-temp', 'k',
- "keep the distribution tree around after creating " +
- "archive file(s)"),
- ('dist-dir=', 'd',
- "directory to put the source distribution archive(s) in "
- "[default: dist]"),
- ]
-
- negative_opt = {}
-
- def run(self):
- self.run_command('egg_info')
- ei_cmd = self.get_finalized_command('egg_info')
- self.filelist = ei_cmd.filelist
- self.filelist.append(os.path.join(ei_cmd.egg_info,'SOURCES.txt'))
- self.check_readme()
- self.check_metadata()
- self.make_distribution()
-
- dist_files = getattr(self.distribution,'dist_files',[])
- for file in self.archive_files:
- data = ('sdist', '', file)
- if data not in dist_files:
- dist_files.append(data)
-
- def add_defaults(self):
- standards = [('README', 'README.txt'),
- self.distribution.script_name]
- for fn in standards:
- if isinstance(fn, tuple):
- alts = fn
- got_it = 0
- for fn in alts:
- if os.path.exists(fn):
- got_it = 1
- self.filelist.append(fn)
- break
-
- if not got_it:
- self.warn("standard file not found: should have one of " +
- ', '.join(alts))
- else:
- if os.path.exists(fn):
- self.filelist.append(fn)
- else:
- self.warn("standard file '%s' not found" % fn)
-
- optional = ['test/test*.py', 'setup.cfg']
- for pattern in optional:
- files = filter(os.path.isfile, glob(pattern))
- if files:
- self.filelist.extend(files)
-
- # getting python files
- if self.distribution.has_pure_modules():
- build_py = self.get_finalized_command('build_py')
- self.filelist.extend(build_py.get_source_files())
-
- if self.distribution.has_ext_modules():
- build_ext = self.get_finalized_command('build_ext')
- self.filelist.extend(build_ext.get_source_files())
-
- if self.distribution.has_c_libraries():
- build_clib = self.get_finalized_command('build_clib')
- self.filelist.extend(build_clib.get_source_files())
-
- if self.distribution.has_scripts():
- build_scripts = self.get_finalized_command('build_scripts')
- self.filelist.extend(build_scripts.get_source_files())
-
- def read_template(self):
- try:
- _sdist.read_template(self)
- except:
- # grody hack to close the template file (MANIFEST.in)
- # this prevents easy_install's attempt at deleting the file from
- # dying and thus masking the real error
- sys.exc_info()[2].tb_next.tb_frame.f_locals['template'].close()
- raise
-
- def check_readme(self):
- alts = ("README", "README.txt")
- for f in alts:
- if os.path.exists(f):
- return
- else:
- self.warn(
- "standard file not found: should have one of " +', '.join(alts)
- )
-
-
- def make_release_tree(self, base_dir, files):
- _sdist.make_release_tree(self, base_dir, files)
-
- # Save any egg_info command line options used to create this sdist
- dest = os.path.join(base_dir, 'setup.cfg')
- if hasattr(os,'link') and os.path.exists(dest):
- # unlink and re-copy, since it might be hard-linked, and
- # we don't want to change the source version
- os.unlink(dest)
- self.copy_file('setup.cfg', dest)
-
- self.get_finalized_command('egg_info').save_version_info(dest)
-
-
-#
diff --git a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/setopt.py b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/setopt.py
deleted file mode 100755
index dbf3a94e..00000000
--- a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/setopt.py
+++ /dev/null
@@ -1,164 +0,0 @@
-import distutils, os
-from setuptools import Command
-from distutils.util import convert_path
-from distutils import log
-from distutils.errors import *
-
-__all__ = ['config_file', 'edit_config', 'option_base', 'setopt']
-
-
-def config_file(kind="local"):
- """Get the filename of the distutils, local, global, or per-user config
-
- `kind` must be one of "local", "global", or "user"
- """
- if kind=='local':
- return 'setup.cfg'
- if kind=='global':
- return os.path.join(
- os.path.dirname(distutils.__file__),'distutils.cfg'
- )
- if kind=='user':
- dot = os.name=='posix' and '.' or ''
- return os.path.expanduser(convert_path("~/%spydistutils.cfg" % dot))
- raise ValueError(
- "config_file() type must be 'local', 'global', or 'user'", kind
- )
-
-
-def edit_config(filename, settings, dry_run=False):
- """Edit a configuration file to include `settings`
-
- `settings` is a dictionary of dictionaries or ``None`` values, keyed by
- command/section name. A ``None`` value means to delete the entire section,
- while a dictionary lists settings to be changed or deleted in that section.
- A setting of ``None`` means to delete that setting.
- """
- from ConfigParser import RawConfigParser
- log.debug("Reading configuration from %s", filename)
- opts = RawConfigParser()
- opts.read([filename])
- for section, options in settings.items():
- if options is None:
- log.info("Deleting section [%s] from %s", section, filename)
- opts.remove_section(section)
- else:
- if not opts.has_section(section):
- log.debug("Adding new section [%s] to %s", section, filename)
- opts.add_section(section)
- for option,value in options.items():
- if value is None:
- log.debug("Deleting %s.%s from %s",
- section, option, filename
- )
- opts.remove_option(section,option)
- if not opts.options(section):
- log.info("Deleting empty [%s] section from %s",
- section, filename)
- opts.remove_section(section)
- else:
- log.debug(
- "Setting %s.%s to %r in %s",
- section, option, value, filename
- )
- opts.set(section,option,value)
-
- log.info("Writing %s", filename)
- if not dry_run:
- f = open(filename,'w'); opts.write(f); f.close()
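-
-# [Editorial usage sketch.] A call like
-#
-#   edit_config('setup.cfg',
-#               {'egg_info': {'tag_build': 'dev',
-#                             'tag_svn_revision': None}})
-#
-# sets [egg_info] tag_build, deletes tag_svn_revision from the section,
-# and {'egg_info': None} would drop the whole section.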
-
-class option_base(Command):
- """Abstract base class for commands that mess with config files"""
-
- user_options = [
- ('global-config', 'g',
- "save options to the site-wide distutils.cfg file"),
- ('user-config', 'u',
- "save options to the current user's pydistutils.cfg file"),
- ('filename=', 'f',
- "configuration file to use (default=setup.cfg)"),
- ]
-
- boolean_options = [
- 'global-config', 'user-config',
- ]
-
- def initialize_options(self):
- self.global_config = None
- self.user_config = None
- self.filename = None
-
- def finalize_options(self):
- filenames = []
- if self.global_config:
- filenames.append(config_file('global'))
- if self.user_config:
- filenames.append(config_file('user'))
- if self.filename is not None:
- filenames.append(self.filename)
- if not filenames:
- filenames.append(config_file('local'))
- if len(filenames)>1:
- raise DistutilsOptionError(
- "Must specify only one configuration file option",
- filenames
- )
- self.filename, = filenames
-
-
-
-
-class setopt(option_base):
- """Save command-line options to a file"""
-
- description = "set an option in setup.cfg or another config file"
-
- user_options = [
- ('command=', 'c', 'command to set an option for'),
- ('option=', 'o', 'option to set'),
- ('set-value=', 's', 'value of the option'),
- ('remove', 'r', 'remove (unset) the value'),
- ] + option_base.user_options
-
- boolean_options = option_base.boolean_options + ['remove']
-
- def initialize_options(self):
- option_base.initialize_options(self)
- self.command = None
- self.option = None
- self.set_value = None
- self.remove = None
-
- def finalize_options(self):
- option_base.finalize_options(self)
- if self.command is None or self.option is None:
- raise DistutilsOptionError("Must specify --command *and* --option")
- if self.set_value is None and not self.remove:
- raise DistutilsOptionError("Must specify --set-value or --remove")
-
- def run(self):
- edit_config(
- self.filename, {
- self.command: {self.option.replace('-','_'):self.set_value}
- },
- self.dry_run
- )
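-
-# [Editorial usage sketch.] The command-line equivalent:
-#
-#   python setup.py setopt -c build -o compiler -s mingw32
-#
-# which persists:
-#
-#   [build]
-#   compiler = mingw32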
-
-
-
-
-
-
diff --git a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/test.py b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/test.py
deleted file mode 100755
index b7aef969..00000000
--- a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/test.py
+++ /dev/null
@@ -1,180 +0,0 @@
-from setuptools import Command
-from distutils.errors import DistutilsOptionError
-import sys
-from pkg_resources import *
-from unittest import TestLoader, main
-
-class ScanningLoader(TestLoader):
-
- def loadTestsFromModule(self, module):
- """Return a suite of all tests cases contained in the given module
-
- If the module is a package, load tests from all the modules in it.
- If the module has an ``additional_tests`` function, call it and add
- the return value to the tests.
- """
- tests = []
- if module.__name__!='setuptools.tests.doctest': # ugh
- tests.append(TestLoader.loadTestsFromModule(self,module))
-
- if hasattr(module, "additional_tests"):
- tests.append(module.additional_tests())
-
- if hasattr(module, '__path__'):
- for file in resource_listdir(module.__name__, ''):
- if file.endswith('.py') and file!='__init__.py':
- submodule = module.__name__+'.'+file[:-3]
- else:
- if resource_exists(
- module.__name__, file+'/__init__.py'
- ):
- submodule = module.__name__+'.'+file
- else:
- continue
- tests.append(self.loadTestsFromName(submodule))
-
- if len(tests)!=1:
- return self.suiteClass(tests)
- else:
- return tests[0] # don't create a nested suite for only one return
-
-
-class test(Command):
-
- """Command to run unit tests after in-place build"""
-
- description = "run unit tests after in-place build"
-
- user_options = [
- ('test-module=','m', "Run 'test_suite' in specified module"),
- ('test-suite=','s',
- "Test suite to run (e.g. 'some_module.test_suite')"),
- ]
-
- def initialize_options(self):
- self.test_suite = None
- self.test_module = None
- self.test_loader = None
-
-
- def finalize_options(self):
-
- if self.test_suite is None:
- if self.test_module is None:
- self.test_suite = self.distribution.test_suite
- else:
- self.test_suite = self.test_module+".test_suite"
- elif self.test_module:
- raise DistutilsOptionError(
- "You may specify a module or a suite, but not both"
- )
-
- self.test_args = [self.test_suite]
-
- if self.verbose:
- self.test_args.insert(0,'--verbose')
- if self.test_loader is None:
- self.test_loader = getattr(self.distribution,'test_loader',None)
- if self.test_loader is None:
- self.test_loader = "setuptools.command.test:ScanningLoader"
-
-
-
- def with_project_on_sys_path(self, func):
- if getattr(self.distribution, 'use_2to3', False):
- # If we run 2to3 we can not do this inplace:
-
- # Ensure metadata is up-to-date
- self.reinitialize_command('build_py', inplace=0)
- self.run_command('build_py')
- bpy_cmd = self.get_finalized_command("build_py")
- build_path = normalize_path(bpy_cmd.build_lib)
-
- # Build extensions
- self.reinitialize_command('egg_info', egg_base=build_path)
- self.run_command('egg_info')
-
- self.reinitialize_command('build_ext', inplace=0)
- self.run_command('build_ext')
- else:
- # Without 2to3 inplace works fine:
- self.run_command('egg_info')
-
- # Build extensions in-place
- self.reinitialize_command('build_ext', inplace=1)
- self.run_command('build_ext')
-
- ei_cmd = self.get_finalized_command("egg_info")
-
- old_path = sys.path[:]
- old_modules = sys.modules.copy()
-
- try:
- sys.path.insert(0, normalize_path(ei_cmd.egg_base))
- working_set.__init__()
- add_activation_listener(lambda dist: dist.activate())
- require('%s==%s' % (ei_cmd.egg_name, ei_cmd.egg_version))
- func()
- finally:
- sys.path[:] = old_path
- sys.modules.clear()
- sys.modules.update(old_modules)
- working_set.__init__()
-
-
- def run(self):
- if self.distribution.install_requires:
- self.distribution.fetch_build_eggs(self.distribution.install_requires)
- if self.distribution.tests_require:
- self.distribution.fetch_build_eggs(self.distribution.tests_require)
-
- if self.test_suite:
- cmd = ' '.join(self.test_args)
- if self.dry_run:
- self.announce('skipping "unittest %s" (dry run)' % cmd)
- else:
- self.announce('running "unittest %s"' % cmd)
- self.with_project_on_sys_path(self.run_tests)
-
-
- def run_tests(self):
- import unittest
- loader_ep = EntryPoint.parse("x="+self.test_loader)
- loader_class = loader_ep.load(require=False)
- unittest.main(
- None, None, [unittest.__file__]+self.test_args,
- testLoader = loader_class()
- )
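-
-# [Editorial usage sketch.] Typical invocations, assuming the project
-# declares test_suite in setup():
-#
-#   python setup.py test                       # run the declared suite
-#   python setup.py test -m mypkg.tests        # that module's test_suite
-#   python setup.py test -s mypkg.tests.suite  # an explicit suite object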
-
-
diff --git a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/upload.py b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/upload.py
deleted file mode 100755
index 1f49745e..00000000
--- a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/upload.py
+++ /dev/null
@@ -1,183 +0,0 @@
-"""distutils.command.upload
-
-Implements the Distutils 'upload' subcommand (upload package to PyPI)."""
-
-from distutils.errors import *
-from distutils.core import Command
-from distutils.spawn import spawn
-from distutils import log
-try:
- from hashlib import md5
-except ImportError:
- from md5 import md5
-import os
-import socket
-import platform
-import ConfigParser
-import httplib
-import base64
-import urlparse
-import cStringIO as StringIO
-
-class upload(Command):
-
- description = "upload binary package to PyPI"
-
- DEFAULT_REPOSITORY = 'http://pypi.python.org/pypi'
-
- user_options = [
- ('repository=', 'r',
- "url of repository [default: %s]" % DEFAULT_REPOSITORY),
- ('show-response', None,
- 'display full response text from server'),
- ('sign', 's',
- 'sign files to upload using gpg'),
- ('identity=', 'i', 'GPG identity used to sign files'),
- ]
- boolean_options = ['show-response', 'sign']
-
- def initialize_options(self):
- self.username = ''
- self.password = ''
- self.repository = ''
- self.show_response = 0
- self.sign = False
- self.identity = None
-
- def finalize_options(self):
- if self.identity and not self.sign:
- raise DistutilsOptionError(
- "Must use --sign for --identity to have meaning"
- )
- if os.environ.has_key('HOME'):
- rc = os.path.join(os.environ['HOME'], '.pypirc')
- if os.path.exists(rc):
- self.announce('Using PyPI login from %s' % rc)
- config = ConfigParser.ConfigParser({
- 'username':'',
- 'password':'',
- 'repository':''})
- config.read(rc)
- if not self.repository:
- self.repository = config.get('server-login', 'repository')
- if not self.username:
- self.username = config.get('server-login', 'username')
- if not self.password:
- self.password = config.get('server-login', 'password')
- if not self.repository:
- self.repository = self.DEFAULT_REPOSITORY
-
- def run(self):
- if not self.distribution.dist_files:
- raise DistutilsOptionError("No dist file created in earlier command")
- for command, pyversion, filename in self.distribution.dist_files:
- self.upload_file(command, pyversion, filename)
-
- def upload_file(self, command, pyversion, filename):
- # Sign if requested
- if self.sign:
- gpg_args = ["gpg", "--detach-sign", "-a", filename]
- if self.identity:
- gpg_args[2:2] = ["--local-user", self.identity]
- spawn(gpg_args,
- dry_run=self.dry_run)
-
- # Fill in the data
- f = open(filename,'rb')
- content = f.read()
- f.close()
- basename = os.path.basename(filename)
- comment = ''
- if command=='bdist_egg' and self.distribution.has_ext_modules():
- comment = "built on %s" % platform.platform(terse=1)
- data = {
- ':action':'file_upload',
- 'protcol_version':'1', # sic: the server expects this misspelled key
- 'name':self.distribution.get_name(),
- 'version':self.distribution.get_version(),
- 'content':(basename,content),
- 'filetype':command,
- 'pyversion':pyversion,
- 'md5_digest':md5(content).hexdigest(),
- }
- if command == 'bdist_rpm':
- dist, version, id = platform.dist()
- if dist:
- comment = 'built for %s %s' % (dist, version)
- elif command == 'bdist_dumb':
- comment = 'built for %s' % platform.platform(terse=1)
- data['comment'] = comment
-
- if self.sign:
- data['gpg_signature'] = (os.path.basename(filename) + ".asc",
- open(filename+".asc").read())
-
- # set up the authentication
- auth = "Basic " + base64.encodestring(self.username + ":" + self.password).strip()
-
- # Build up the MIME payload for the POST data
- boundary = '--------------GHSKFJDLGDS7543FJKLFHRE75642756743254'
- sep_boundary = '\n--' + boundary
- end_boundary = sep_boundary + '--'
- body = StringIO.StringIO()
- for key, value in data.items():
- # handle multiple entries for the same name
- if type(value) != type([]):
- value = [value]
- for value in value:
- if type(value) is tuple:
- fn = ';filename="%s"' % value[0]
- value = value[1]
- else:
- fn = ""
- value = str(value)
- body.write(sep_boundary)
- body.write('\nContent-Disposition: form-data; name="%s"'%key)
- body.write(fn)
- body.write("\n\n")
- body.write(value)
- if value and value[-1] == '\r':
- body.write('\n') # write an extra newline (lurve Macs)
- body.write(end_boundary)
- body.write("\n")
- body = body.getvalue()
-
- self.announce("Submitting %s to %s" % (filename, self.repository), log.INFO)
-
- # build the Request
- # We can't use urllib2 since we need to send the Basic
- # auth right with the first request
- schema, netloc, url, params, query, fragments = \
- urlparse.urlparse(self.repository)
- assert not params and not query and not fragments
- if schema == 'http':
- http = httplib.HTTPConnection(netloc)
- elif schema == 'https':
- http = httplib.HTTPSConnection(netloc)
- else:
- raise AssertionError, "unsupported schema "+schema
-
- data = ''
- loglevel = log.INFO
- try:
- http.connect()
- http.putrequest("POST", url)
- http.putheader('Content-type',
- 'multipart/form-data; boundary=%s'%boundary)
- http.putheader('Content-length', str(len(body)))
- http.putheader('Authorization', auth)
- http.endheaders()
- http.send(body)
- except socket.error, e:
- self.announce(str(e), log.ERROR)
- return
-
- r = http.getresponse()
- if r.status == 200:
- self.announce('Server response (%s): %s' % (r.status, r.reason),
- log.INFO)
- else:
- self.announce('Upload failed (%s): %s' % (r.status, r.reason),
- log.ERROR)
- if self.show_response:
- print '-'*75, r.read(), '-'*75
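-
-# [Editorial sketch.] The hand-rolled multipart body built above has the
-# shape (values illustrative):
-#
-#   --<boundary>
-#   Content-Disposition: form-data; name=":action"
-#
-#   file_upload
-#   --<boundary>
-#   Content-Disposition: form-data; name="content";filename="pkg-1.0.egg"
-#
-#   <file bytes>
-#   --<boundary>--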
diff --git a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/upload_docs.py b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/upload_docs.py
deleted file mode 100755
index 213f7b58..00000000
--- a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/upload_docs.py
+++ /dev/null
@@ -1,178 +0,0 @@
-# -*- coding: utf-8 -*-
-"""upload_docs
-
-Implements a Distutils 'upload_docs' subcommand (upload documentation to
-PyPI's packages.python.org).
-"""
-
-import os
-import socket
-import zipfile
-import httplib
-import base64
-import urlparse
-import tempfile
-import sys
-
-from distutils import log
-from distutils.errors import DistutilsOptionError
-
-try:
- from distutils.command.upload import upload
-except ImportError:
- from setuptools.command.upload import upload
-
-_IS_PYTHON3 = sys.version > '3'
-
-try:
- bytes
-except NameError:
- bytes = str
-
-def b(str_or_bytes):
- """Return bytes by either encoding the argument as ASCII or simply return
- the argument as-is."""
- if not isinstance(str_or_bytes, bytes):
- return str_or_bytes.encode('ascii')
- else:
- return str_or_bytes
-
-
-class upload_docs(upload):
-
- description = 'Upload documentation to PyPI'
-
- user_options = [
- ('repository=', 'r',
- "url of repository [default: %s]" % upload.DEFAULT_REPOSITORY),
- ('show-response', None,
- 'display full response text from server'),
- ('upload-dir=', None, 'directory to upload'),
- ]
- boolean_options = upload.boolean_options
-
- def initialize_options(self):
- upload.initialize_options(self)
- self.upload_dir = None
-
- def finalize_options(self):
- upload.finalize_options(self)
- if self.upload_dir is None:
- build = self.get_finalized_command('build')
- self.upload_dir = os.path.join(build.build_base, 'docs')
- self.mkpath(self.upload_dir)
- self.ensure_dirname('upload_dir')
- self.announce('Using upload directory %s' % self.upload_dir)
-
- def create_zipfile(self):
- name = self.distribution.metadata.get_name()
- tmp_dir = tempfile.mkdtemp()
- tmp_file = os.path.join(tmp_dir, "%s.zip" % name)
- zip_file = zipfile.ZipFile(tmp_file, "w")
- for root, dirs, files in os.walk(self.upload_dir):
- if root == self.upload_dir and not files:
- raise DistutilsOptionError(
- "no files found in upload directory '%s'"
- % self.upload_dir)
- for name in files:
- full = os.path.join(root, name)
- relative = root[len(self.upload_dir):].lstrip(os.path.sep)
- dest = os.path.join(relative, name)
- zip_file.write(full, dest)
- zip_file.close()
- return tmp_file
-
- def run(self):
- zip_file = self.create_zipfile()
- self.upload_file(zip_file)
-
- def upload_file(self, filename):
- content = open(filename, 'rb').read()
- meta = self.distribution.metadata
- data = {
- ':action': 'doc_upload',
- 'name': meta.get_name(),
- 'content': (os.path.basename(filename), content),
- }
- # set up the authentication
- credentials = self.username + ':' + self.password
- if _IS_PYTHON3: # base64 only works with bytes in Python 3.
- encoded_creds = base64.encodebytes(credentials.encode('utf8'))
- auth = bytes("Basic ")
- else:
- encoded_creds = base64.encodestring(credentials)
- auth = "Basic "
- auth += encoded_creds.strip()
-
- # Build up the MIME payload for the POST data
- boundary = b('--------------GHSKFJDLGDS7543FJKLFHRE75642756743254')
- sep_boundary = b('\n--') + boundary
- end_boundary = sep_boundary + b('--')
- body = []
- for key, values in data.items():
- # handle multiple entries for the same name
- if type(values) != type([]):
- values = [values]
- for value in values:
- if type(value) is tuple:
- fn = b(';filename="%s"' % value[0])
- value = value[1]
- else:
- fn = b("")
- body.append(sep_boundary)
- body.append(b('\nContent-Disposition: form-data; name="%s"'%key))
- body.append(fn)
- body.append(b("\n\n"))
- body.append(b(value))
- if value and value[-1] == b('\r'):
- body.append(b('\n')) # write an extra newline (lurve Macs)
- body.append(end_boundary)
- body.append(b("\n"))
- body = b('').join(body)
-
- self.announce("Submitting documentation to %s" % (self.repository),
- log.INFO)
-
- # build the Request
- # We can't use urllib2 since we need to send the Basic
- # auth right with the first request
- schema, netloc, url, params, query, fragments = \
- urlparse.urlparse(self.repository)
- assert not params and not query and not fragments
- if schema == 'http':
- conn = httplib.HTTPConnection(netloc)
- elif schema == 'https':
- conn = httplib.HTTPSConnection(netloc)
- else:
- raise AssertionError("unsupported schema "+schema)
-
- data = ''
- loglevel = log.INFO
- try:
- conn.connect()
- conn.putrequest("POST", url)
- conn.putheader('Content-type',
- 'multipart/form-data; boundary=%s'%boundary)
- conn.putheader('Content-length', str(len(body)))
- conn.putheader('Authorization', auth)
- conn.endheaders()
- conn.send(body)
- except socket.error, e:
- self.announce(str(e), log.ERROR)
- return
-
- r = conn.getresponse()
- if r.status == 200:
- self.announce('Server response (%s): %s' % (r.status, r.reason),
- log.INFO)
- elif r.status == 301:
- location = r.getheader('Location')
- if location is None:
- location = 'http://packages.python.org/%s/' % meta.get_name()
- self.announce('Upload successful. Visit %s' % location,
- log.INFO)
- else:
- self.announce('Upload failed (%s): %s' % (r.status, r.reason),
- log.ERROR)
- if self.show_response:
- print '-'*75, r.read(), '-'*75
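For reference, the upload_file() method in the deleted upload_docs.py assembles its multipart/form-data POST body by hand, joining boundary-separated parts with the b() helper. A minimal sketch of the same layout, shown in Python 3 for illustration (field names, boundary, and payload are illustrative, not the PyPI API; bytes %-formatting needs 3.5+):

# Sketch of the hand-rolled multipart body built by upload_file().
BOUNDARY = b'--------------example-boundary'
SEP = b'\n--' + BOUNDARY
END = SEP + b'--'

def multipart_body(fields):
    """fields maps a name to bytes, or to a (filename, bytes) tuple."""
    parts = []
    for name, value in fields.items():
        filename = b''
        if isinstance(value, tuple):
            filename = b';filename="%s"' % value[0].encode('ascii')
            value = value[1]
        parts.append(SEP)
        parts.append(b'\nContent-Disposition: form-data; name="%s"'
                     % name.encode('ascii'))
        parts.append(filename)
        parts.append(b'\n\n')
        parts.append(value)
    parts.append(END + b'\n')
    return b''.join(parts)

body = multipart_body({':action': b'doc_upload',
                       'content': ('docs.zip', b'<zip bytes>')})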
diff --git a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/depends.py b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/depends.py
deleted file mode 100755
index 4b7b3437..00000000
--- a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/depends.py
+++ /dev/null
@@ -1,246 +0,0 @@
-from __future__ import generators
-import sys, imp, marshal
-from imp import PKG_DIRECTORY, PY_COMPILED, PY_SOURCE, PY_FROZEN
-from distutils.version import StrictVersion, LooseVersion
-
-__all__ = [
- 'Require', 'find_module', 'get_module_constant', 'extract_constant'
-]
-
-class Require:
- """A prerequisite to building or installing a distribution"""
-
- def __init__(self,name,requested_version,module,homepage='',
- attribute=None,format=None
- ):
-
- if format is None and requested_version is not None:
- format = StrictVersion
-
- if format is not None:
- requested_version = format(requested_version)
- if attribute is None:
- attribute = '__version__'
-
- self.__dict__.update(locals())
- del self.self
-
-
- def full_name(self):
- """Return full package/distribution name, w/version"""
- if self.requested_version is not None:
- return '%s-%s' % (self.name,self.requested_version)
- return self.name
-
-
- def version_ok(self,version):
- """Is 'version' sufficiently up-to-date?"""
- return self.attribute is None or self.format is None or \
- str(version)<>"unknown" and version >= self.requested_version
-
-
- def get_version(self, paths=None, default="unknown"):
-
- """Get version number of installed module, 'None', or 'default'
-
- Search 'paths' for module. If not found, return 'None'. If found,
- return the extracted version attribute, or 'default' if no version
- attribute was specified, or the value cannot be determined without
- importing the module. The version is formatted according to the
- requirement's version format (if any), unless it is 'None' or the
- supplied 'default'.
- """
-
- if self.attribute is None:
- try:
- f,p,i = find_module(self.module,paths)
- if f: f.close()
- return default
- except ImportError:
- return None
-
- v = get_module_constant(self.module,self.attribute,default,paths)
-
- if v is not None and v is not default and self.format is not None:
- return self.format(v)
-
- return v
-
-
- def is_present(self,paths=None):
- """Return true if dependency is present on 'paths'"""
- return self.get_version(paths) is not None
-
-
- def is_current(self,paths=None):
- """Return true if dependency is present and up-to-date on 'paths'"""
- version = self.get_version(paths)
- if version is None:
- return False
- return self.version_ok(version)
-
-
-def _iter_code(code):
-
- """Yield '(op,arg)' pair for each operation in code object 'code'"""
-
- from array import array
- from dis import HAVE_ARGUMENT, EXTENDED_ARG
-
- bytes = array('b',code.co_code)
- eof = len(code.co_code)
-
- ptr = 0
- extended_arg = 0
-
- while ptr<eof:
-
- op = bytes[ptr]
-
- if op>=HAVE_ARGUMENT:
-
- arg = bytes[ptr+1] + bytes[ptr+2]*256 + extended_arg
- ptr += 3
-
- if op==EXTENDED_ARG:
- extended_arg = arg * 65536L
- continue
-
- else:
- arg = None
- ptr += 1
-
- yield op,arg
-
-
-
-
-
-
-
-
-
-
-def find_module(module, paths=None):
- """Just like 'imp.find_module()', but with package support"""
-
- parts = module.split('.')
-
- while parts:
- part = parts.pop(0)
- f, path, (suffix,mode,kind) = info = imp.find_module(part, paths)
-
- if kind==PKG_DIRECTORY:
- parts = parts or ['__init__']
- paths = [path]
-
- elif parts:
- raise ImportError("Can't find %r in %s" % (parts,module))
-
- return info
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-def get_module_constant(module, symbol, default=-1, paths=None):
-
- """Find 'module' by searching 'paths', and extract 'symbol'
-
- Return 'None' if 'module' does not exist on 'paths', or it does not define
- 'symbol'. If the module defines 'symbol' as a constant, return the
- constant. Otherwise, return 'default'."""
-
- try:
- f, path, (suffix,mode,kind) = find_module(module,paths)
- except ImportError:
- # Module doesn't exist
- return None
-
- try:
- if kind==PY_COMPILED:
- f.read(8) # skip magic & date
- code = marshal.load(f)
- elif kind==PY_FROZEN:
- code = imp.get_frozen_object(module)
- elif kind==PY_SOURCE:
- code = compile(f.read(), path, 'exec')
- else:
- # Not something we can parse; we'll have to import it. :(
- if module not in sys.modules:
- imp.load_module(module,f,path,(suffix,mode,kind))
- return getattr(sys.modules[module],symbol,None)
-
- finally:
- if f:
- f.close()
-
- return extract_constant(code,symbol,default)
-
-
-
-
-
-
-
-
-def extract_constant(code,symbol,default=-1):
- """Extract the constant value of 'symbol' from 'code'
-
- If the name 'symbol' is bound to a constant value by the Python code
- object 'code', return that value. If 'symbol' is bound to an expression,
- return 'default'. Otherwise, return 'None'.
-
- Return value is based on the first assignment to 'symbol'. 'symbol' must
- be a global, or at least a non-"fast" local in the code block. That is,
- only 'STORE_NAME' and 'STORE_GLOBAL' opcodes are checked, and 'symbol'
- must be present in 'code.co_names'.
- """
-
- if symbol not in code.co_names:
-        # name's not there, can't possibly be an assignment
- return None
-
- name_idx = list(code.co_names).index(symbol)
-
- STORE_NAME = 90
- STORE_GLOBAL = 97
- LOAD_CONST = 100
-
- const = default
-
- for op, arg in _iter_code(code):
-
- if op==LOAD_CONST:
- const = code.co_consts[arg]
- elif arg==name_idx and (op==STORE_NAME or op==STORE_GLOBAL):
- return const
- else:
- const = default
-
-if sys.platform.startswith('java') or sys.platform == 'cli':
- # XXX it'd be better to test assertions about bytecode instead...
- del extract_constant, get_module_constant
- __all__.remove('extract_constant')
- __all__.remove('get_module_constant')
-
-
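The extract_constant() helper above walks raw bytecode with hard-coded opcode numbers. A standalone sketch of the same first-assignment lookup using Python 3's dis module (not part of the deleted file; the helper name is made up):

import dis

def find_constant(code, symbol, default=-1):
    """First constant assigned to 'symbol' in 'code', as extract_constant does."""
    if symbol not in code.co_names:
        return None                      # never stored by name
    const = default
    for ins in dis.get_instructions(code):
        if ins.opname == 'LOAD_CONST':
            const = ins.argval
        elif ins.opname in ('STORE_NAME', 'STORE_GLOBAL') and ins.argval == symbol:
            return const                 # first assignment wins
        else:
            const = default
    return None

code = compile("__version__ = '1.0'", '<mod>', 'exec')
print(find_constant(code, '__version__'))    # prints 1.0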
diff --git a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/dist.py b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/dist.py
deleted file mode 100755
index fd4ca66b..00000000
--- a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/dist.py
+++ /dev/null
@@ -1,816 +0,0 @@
-__all__ = ['Distribution']
-
-import re
-from distutils.core import Distribution as _Distribution
-from setuptools.depends import Require
-from setuptools.command.install import install
-from setuptools.command.sdist import sdist
-from setuptools.command.install_lib import install_lib
-from distutils.errors import DistutilsOptionError, DistutilsPlatformError
-from distutils.errors import DistutilsSetupError
-import setuptools, pkg_resources, distutils.core, distutils.dist, distutils.cmd
-import os, distutils.log
-
-def _get_unpatched(cls):
- """Protect against re-patching the distutils if reloaded
-
- Also ensures that no other distutils extension monkeypatched the distutils
- first.
- """
- while cls.__module__.startswith('setuptools'):
- cls, = cls.__bases__
- if not cls.__module__.startswith('distutils'):
- raise AssertionError(
- "distutils has already been patched by %r" % cls
- )
- return cls
-
-_Distribution = _get_unpatched(_Distribution)
-
-sequence = tuple, list
-
-def check_importable(dist, attr, value):
- try:
- ep = pkg_resources.EntryPoint.parse('x='+value)
- assert not ep.extras
- except (TypeError,ValueError,AttributeError,AssertionError):
- raise DistutilsSetupError(
- "%r must be importable 'module:attrs' string (got %r)"
- % (attr,value)
- )
-
-
-def assert_string_list(dist, attr, value):
- """Verify that value is a string list or None"""
- try:
- assert ''.join(value)!=value
- except (TypeError,ValueError,AttributeError,AssertionError):
- raise DistutilsSetupError(
- "%r must be a list of strings (got %r)" % (attr,value)
- )
-
-def check_nsp(dist, attr, value):
- """Verify that namespace packages are valid"""
- assert_string_list(dist,attr,value)
- for nsp in value:
- if not dist.has_contents_for(nsp):
- raise DistutilsSetupError(
- "Distribution contains no modules or packages for " +
- "namespace package %r" % nsp
- )
- if '.' in nsp:
- parent = '.'.join(nsp.split('.')[:-1])
- if parent not in value:
- distutils.log.warn(
- "%r is declared as a package namespace, but %r is not:"
- " please correct this in setup.py", nsp, parent
- )
-
-def check_extras(dist, attr, value):
- """Verify that extras_require mapping is valid"""
- try:
- for k,v in value.items():
- list(pkg_resources.parse_requirements(v))
- except (TypeError,ValueError,AttributeError):
- raise DistutilsSetupError(
- "'extras_require' must be a dictionary whose values are "
- "strings or lists of strings containing valid project/version "
- "requirement specifiers."
- )
-
-
-
-
-def assert_bool(dist, attr, value):
- """Verify that value is True, False, 0, or 1"""
- if bool(value) != value:
- raise DistutilsSetupError(
- "%r must be a boolean value (got %r)" % (attr,value)
- )
-def check_requirements(dist, attr, value):
- """Verify that install_requires is a valid requirements list"""
- try:
- list(pkg_resources.parse_requirements(value))
- except (TypeError,ValueError):
- raise DistutilsSetupError(
- "%r must be a string or list of strings "
- "containing valid project/version requirement specifiers" % (attr,)
- )
-def check_entry_points(dist, attr, value):
- """Verify that entry_points map is parseable"""
- try:
- pkg_resources.EntryPoint.parse_map(value)
- except ValueError, e:
- raise DistutilsSetupError(e)
-
-def check_test_suite(dist, attr, value):
- if not isinstance(value,basestring):
- raise DistutilsSetupError("test_suite must be a string")
-
-def check_package_data(dist, attr, value):
- """Verify that value is a dictionary of package names to glob lists"""
- if isinstance(value,dict):
- for k,v in value.items():
- if not isinstance(k,str): break
- try: iter(v)
- except TypeError:
- break
- else:
- return
- raise DistutilsSetupError(
- attr+" must be a dictionary mapping package names to lists of "
- "wildcard patterns"
- )
-
-class Distribution(_Distribution):
- """Distribution with support for features, tests, and package data
-
- This is an enhanced version of 'distutils.dist.Distribution' that
- effectively adds the following new optional keyword arguments to 'setup()':
-
- 'install_requires' -- a string or sequence of strings specifying project
- versions that the distribution requires when installed, in the format
- used by 'pkg_resources.require()'. They will be installed
- automatically when the package is installed. If you wish to use
- packages that are not available in PyPI, or want to give your users an
- alternate download location, you can add a 'find_links' option to the
- '[easy_install]' section of your project's 'setup.cfg' file, and then
- setuptools will scan the listed web pages for links that satisfy the
- requirements.
-
- 'extras_require' -- a dictionary mapping names of optional "extras" to the
- additional requirement(s) that using those extras incurs. For example,
- this::
-
- extras_require = dict(reST = ["docutils>=0.3", "reSTedit"])
-
- indicates that the distribution can optionally provide an extra
- capability called "reST", but it can only be used if docutils and
- reSTedit are installed. If the user installs your package using
- EasyInstall and requests one of your extras, the corresponding
- additional requirements will be installed if needed.
-
- 'features' -- a dictionary mapping option names to 'setuptools.Feature'
- objects. Features are a portion of the distribution that can be
- included or excluded based on user options, inter-feature dependencies,
- and availability on the current system. Excluded features are omitted
- from all setup commands, including source and binary distributions, so
- you can create multiple distributions from the same source tree.
- Feature names should be valid Python identifiers, except that they may
- contain the '-' (minus) sign. Features can be included or excluded
- via the command line options '--with-X' and '--without-X', where 'X' is
- the name of the feature. Whether a feature is included by default, and
- whether you are allowed to control this from the command line, is
- determined by the Feature object. See the 'Feature' class for more
- information.
-
- 'test_suite' -- the name of a test suite to run for the 'test' command.
- If the user runs 'python setup.py test', the package will be installed,
- and the named test suite will be run. The format is the same as
- would be used on a 'unittest.py' command line. That is, it is the
- dotted name of an object to import and call to generate a test suite.
-
- 'package_data' -- a dictionary mapping package names to lists of filenames
- or globs to use to find data files contained in the named packages.
- If the dictionary has filenames or globs listed under '""' (the empty
- string), those names will be searched for in every package, in addition
- to any names for the specific package. Data files found using these
- names/globs will be installed along with the package, in the same
- location as the package. Note that globs are allowed to reference
- the contents of non-package subdirectories, as long as you use '/' as
- a path separator. (Globs are automatically converted to
- platform-specific paths at runtime.)
-
- In addition to these new keywords, this class also has several new methods
- for manipulating the distribution's contents. For example, the 'include()'
- and 'exclude()' methods can be thought of as in-place add and subtract
- commands that add or remove packages, modules, extensions, and so on from
- the distribution. They are used by the feature subsystem to configure the
- distribution for the included and excluded features.
- """
-
- _patched_dist = None
-
- def patch_missing_pkg_info(self, attrs):
- # Fake up a replacement for the data that would normally come from
- # PKG-INFO, but which might not yet be built if this is a fresh
- # checkout.
- #
- if not attrs or 'name' not in attrs or 'version' not in attrs:
- return
- key = pkg_resources.safe_name(str(attrs['name'])).lower()
- dist = pkg_resources.working_set.by_key.get(key)
- if dist is not None and not dist.has_metadata('PKG-INFO'):
- dist._version = pkg_resources.safe_version(str(attrs['version']))
- self._patched_dist = dist
-
- def __init__ (self, attrs=None):
- have_package_data = hasattr(self, "package_data")
- if not have_package_data:
- self.package_data = {}
- self.require_features = []
- self.features = {}
- self.dist_files = []
- self.src_root = attrs and attrs.pop("src_root", None)
- self.patch_missing_pkg_info(attrs)
- # Make sure we have any eggs needed to interpret 'attrs'
- if attrs is not None:
- self.dependency_links = attrs.pop('dependency_links', [])
- assert_string_list(self,'dependency_links',self.dependency_links)
- if attrs and 'setup_requires' in attrs:
- self.fetch_build_eggs(attrs.pop('setup_requires'))
- for ep in pkg_resources.iter_entry_points('distutils.setup_keywords'):
- if not hasattr(self,ep.name):
- setattr(self,ep.name,None)
- _Distribution.__init__(self,attrs)
- if isinstance(self.metadata.version, (int,long,float)):
- # Some people apparently take "version number" too literally :)
- self.metadata.version = str(self.metadata.version)
-
- def parse_command_line(self):
- """Process features after parsing command line options"""
- result = _Distribution.parse_command_line(self)
- if self.features:
- self._finalize_features()
- return result
-
- def _feature_attrname(self,name):
- """Convert feature name to corresponding option attribute name"""
- return 'with_'+name.replace('-','_')
-
- def fetch_build_eggs(self, requires):
- """Resolve pre-setup requirements"""
- from pkg_resources import working_set, parse_requirements
- for dist in working_set.resolve(
- parse_requirements(requires), installer=self.fetch_build_egg
- ):
- working_set.add(dist)
-
- def finalize_options(self):
- _Distribution.finalize_options(self)
- if self.features:
- self._set_global_opts_from_features()
-
- for ep in pkg_resources.iter_entry_points('distutils.setup_keywords'):
- value = getattr(self,ep.name,None)
- if value is not None:
- ep.require(installer=self.fetch_build_egg)
- ep.load()(self, ep.name, value)
- if getattr(self, 'convert_2to3_doctests', None):
- # XXX may convert to set here when we can rely on set being builtin
- self.convert_2to3_doctests = [os.path.abspath(p) for p in self.convert_2to3_doctests]
- else:
- self.convert_2to3_doctests = []
-
- def fetch_build_egg(self, req):
- """Fetch an egg needed for building"""
- try:
- cmd = self._egg_fetcher
- except AttributeError:
- from setuptools.command.easy_install import easy_install
- dist = self.__class__({'script_args':['easy_install']})
- dist.parse_config_files()
- opts = dist.get_option_dict('easy_install')
- keep = (
- 'find_links', 'site_dirs', 'index_url', 'optimize',
- 'site_dirs', 'allow_hosts'
- )
- for key in opts.keys():
- if key not in keep:
- del opts[key] # don't use any other settings
- if self.dependency_links:
- links = self.dependency_links[:]
- if 'find_links' in opts:
- links = opts['find_links'][1].split() + links
- opts['find_links'] = ('setup', links)
- cmd = easy_install(
- dist, args=["x"], install_dir=os.curdir, exclude_scripts=True,
- always_copy=False, build_directory=None, editable=False,
- upgrade=False, multi_version=True, no_report = True
- )
- cmd.ensure_finalized()
- self._egg_fetcher = cmd
- return cmd.easy_install(req)
-
- def _set_global_opts_from_features(self):
- """Add --with-X/--without-X options based on optional features"""
-
- go = []
- no = self.negative_opt.copy()
-
- for name,feature in self.features.items():
- self._set_feature(name,None)
- feature.validate(self)
-
- if feature.optional:
- descr = feature.description
- incdef = ' (default)'
- excdef=''
- if not feature.include_by_default():
- excdef, incdef = incdef, excdef
-
- go.append(('with-'+name, None, 'include '+descr+incdef))
- go.append(('without-'+name, None, 'exclude '+descr+excdef))
- no['without-'+name] = 'with-'+name
-
- self.global_options = self.feature_options = go + self.global_options
- self.negative_opt = self.feature_negopt = no
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- def _finalize_features(self):
- """Add/remove features and resolve dependencies between them"""
-
- # First, flag all the enabled items (and thus their dependencies)
- for name,feature in self.features.items():
- enabled = self.feature_is_included(name)
- if enabled or (enabled is None and feature.include_by_default()):
- feature.include_in(self)
- self._set_feature(name,1)
-
- # Then disable the rest, so that off-by-default features don't
- # get flagged as errors when they're required by an enabled feature
- for name,feature in self.features.items():
- if not self.feature_is_included(name):
- feature.exclude_from(self)
- self._set_feature(name,0)
-
-
- def get_command_class(self, command):
- """Pluggable version of get_command_class()"""
- if command in self.cmdclass:
- return self.cmdclass[command]
-
- for ep in pkg_resources.iter_entry_points('distutils.commands',command):
- ep.require(installer=self.fetch_build_egg)
- self.cmdclass[command] = cmdclass = ep.load()
- return cmdclass
- else:
- return _Distribution.get_command_class(self, command)
-
- def print_commands(self):
- for ep in pkg_resources.iter_entry_points('distutils.commands'):
- if ep.name not in self.cmdclass:
- cmdclass = ep.load(False) # don't require extras, we're not running
- self.cmdclass[ep.name] = cmdclass
- return _Distribution.print_commands(self)
-
-
-
-
-
- def _set_feature(self,name,status):
- """Set feature's inclusion status"""
- setattr(self,self._feature_attrname(name),status)
-
- def feature_is_included(self,name):
- """Return 1 if feature is included, 0 if excluded, 'None' if unknown"""
- return getattr(self,self._feature_attrname(name))
-
- def include_feature(self,name):
- """Request inclusion of feature named 'name'"""
-
- if self.feature_is_included(name)==0:
- descr = self.features[name].description
- raise DistutilsOptionError(
- descr + " is required, but was excluded or is not available"
- )
- self.features[name].include_in(self)
- self._set_feature(name,1)
-
- def include(self,**attrs):
- """Add items to distribution that are named in keyword arguments
-
-        For example, 'dist.include(py_modules=["x"])' would add 'x' to
- the distribution's 'py_modules' attribute, if it was not already
- there.
-
- Currently, this method only supports inclusion for attributes that are
- lists or tuples. If you need to add support for adding to other
- attributes in this or a subclass, you can add an '_include_X' method,
- where 'X' is the name of the attribute. The method will be called with
- the value passed to 'include()'. So, 'dist.include(foo={"bar":"baz"})'
- will try to call 'dist._include_foo({"bar":"baz"})', which can then
- handle whatever special inclusion logic is needed.
- """
- for k,v in attrs.items():
- include = getattr(self, '_include_'+k, None)
- if include:
- include(v)
- else:
- self._include_misc(k,v)
-
- def exclude_package(self,package):
- """Remove packages, modules, and extensions in named package"""
-
- pfx = package+'.'
- if self.packages:
- self.packages = [
- p for p in self.packages
- if p != package and not p.startswith(pfx)
- ]
-
- if self.py_modules:
- self.py_modules = [
- p for p in self.py_modules
- if p != package and not p.startswith(pfx)
- ]
-
- if self.ext_modules:
- self.ext_modules = [
- p for p in self.ext_modules
- if p.name != package and not p.name.startswith(pfx)
- ]
-
-
- def has_contents_for(self,package):
- """Return true if 'exclude_package(package)' would do something"""
-
- pfx = package+'.'
-
- for p in self.iter_distribution_names():
- if p==package or p.startswith(pfx):
- return True
-
-
-
-
-
-
-
-
-
-
- def _exclude_misc(self,name,value):
- """Handle 'exclude()' for list/tuple attrs without a special handler"""
- if not isinstance(value,sequence):
- raise DistutilsSetupError(
- "%s: setting must be a list or tuple (%r)" % (name, value)
- )
- try:
- old = getattr(self,name)
- except AttributeError:
- raise DistutilsSetupError(
- "%s: No such distribution setting" % name
- )
- if old is not None and not isinstance(old,sequence):
- raise DistutilsSetupError(
- name+": this setting cannot be changed via include/exclude"
- )
- elif old:
- setattr(self,name,[item for item in old if item not in value])
-
- def _include_misc(self,name,value):
- """Handle 'include()' for list/tuple attrs without a special handler"""
-
- if not isinstance(value,sequence):
- raise DistutilsSetupError(
- "%s: setting must be a list (%r)" % (name, value)
- )
- try:
- old = getattr(self,name)
- except AttributeError:
- raise DistutilsSetupError(
- "%s: No such distribution setting" % name
- )
- if old is None:
- setattr(self,name,value)
- elif not isinstance(old,sequence):
- raise DistutilsSetupError(
- name+": this setting cannot be changed via include/exclude"
- )
- else:
- setattr(self,name,old+[item for item in value if item not in old])
-
- def exclude(self,**attrs):
- """Remove items from distribution that are named in keyword arguments
-
- For example, 'dist.exclude(py_modules=["x"])' would remove 'x' from
- the distribution's 'py_modules' attribute. Excluding packages uses
- the 'exclude_package()' method, so all of the package's contained
- packages, modules, and extensions are also excluded.
-
- Currently, this method only supports exclusion from attributes that are
- lists or tuples. If you need to add support for excluding from other
- attributes in this or a subclass, you can add an '_exclude_X' method,
- where 'X' is the name of the attribute. The method will be called with
- the value passed to 'exclude()'. So, 'dist.exclude(foo={"bar":"baz"})'
- will try to call 'dist._exclude_foo({"bar":"baz"})', which can then
- handle whatever special exclusion logic is needed.
- """
- for k,v in attrs.items():
- exclude = getattr(self, '_exclude_'+k, None)
- if exclude:
- exclude(v)
- else:
- self._exclude_misc(k,v)
-
- def _exclude_packages(self,packages):
- if not isinstance(packages,sequence):
- raise DistutilsSetupError(
- "packages: setting must be a list or tuple (%r)" % (packages,)
- )
- map(self.exclude_package, packages)
-
-
-
-
-
-
-
-
-
-
-
-
- def _parse_command_opts(self, parser, args):
- # Remove --with-X/--without-X options when processing command args
- self.global_options = self.__class__.global_options
- self.negative_opt = self.__class__.negative_opt
-
- # First, expand any aliases
- command = args[0]
- aliases = self.get_option_dict('aliases')
- while command in aliases:
- src,alias = aliases[command]
- del aliases[command] # ensure each alias can expand only once!
- import shlex
- args[:1] = shlex.split(alias,True)
- command = args[0]
-
- nargs = _Distribution._parse_command_opts(self, parser, args)
-
- # Handle commands that want to consume all remaining arguments
- cmd_class = self.get_command_class(command)
- if getattr(cmd_class,'command_consumes_arguments',None):
- self.get_option_dict(command)['args'] = ("command line", nargs)
- if nargs is not None:
- return []
-
- return nargs
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- def get_cmdline_options(self):
- """Return a '{cmd: {opt:val}}' map of all command-line options
-
- Option names are all long, but do not include the leading '--', and
- contain dashes rather than underscores. If the option doesn't take
- an argument (e.g. '--quiet'), the 'val' is 'None'.
-
- Note that options provided by config files are intentionally excluded.
- """
-
- d = {}
-
- for cmd,opts in self.command_options.items():
-
- for opt,(src,val) in opts.items():
-
- if src != "command line":
- continue
-
- opt = opt.replace('_','-')
-
- if val==0:
- cmdobj = self.get_command_obj(cmd)
- neg_opt = self.negative_opt.copy()
- neg_opt.update(getattr(cmdobj,'negative_opt',{}))
- for neg,pos in neg_opt.items():
- if pos==opt:
- opt=neg
- val=None
- break
- else:
- raise AssertionError("Shouldn't be able to get here")
-
- elif val==1:
- val = None
-
- d.setdefault(cmd,{})[opt] = val
-
- return d
-
-
- def iter_distribution_names(self):
- """Yield all packages, modules, and extension names in distribution"""
-
- for pkg in self.packages or ():
- yield pkg
-
- for module in self.py_modules or ():
- yield module
-
- for ext in self.ext_modules or ():
- if isinstance(ext,tuple):
- name, buildinfo = ext
- else:
- name = ext.name
- if name.endswith('module'):
- name = name[:-6]
- yield name
-
-# Install it throughout the distutils
-for module in distutils.dist, distutils.core, distutils.cmd:
- module.Distribution = Distribution
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-class Feature:
-    """A subset of the distribution that can be excluded if unneeded/unwanted
-
- Features are created using these keyword arguments:
-
- 'description' -- a short, human readable description of the feature, to
- be used in error messages, and option help messages.
-
- 'standard' -- if true, the feature is included by default if it is
- available on the current system. Otherwise, the feature is only
- included if requested via a command line '--with-X' option, or if
- another included feature requires it. The default setting is 'False'.
-
- 'available' -- if true, the feature is available for installation on the
- current system. The default setting is 'True'.
-
- 'optional' -- if true, the feature's inclusion can be controlled from the
- command line, using the '--with-X' or '--without-X' options. If
- false, the feature's inclusion status is determined automatically,
-        based on 'available', 'standard', and whether any other feature
- requires it. The default setting is 'True'.
-
- 'require_features' -- a string or sequence of strings naming features
- that should also be included if this feature is included. Defaults to
- empty list. May also contain 'Require' objects that should be
- added/removed from the distribution.
-
- 'remove' -- a string or list of strings naming packages to be removed
- from the distribution if this feature is *not* included. If the
- feature *is* included, this argument is ignored. This argument exists
- to support removing features that "crosscut" a distribution, such as
- defining a 'tests' feature that removes all the 'tests' subpackages
- provided by other features. The default for this argument is an empty
- list. (Note: the named package(s) or modules must exist in the base
- distribution when the 'setup()' function is initially called.)
-
- other keywords -- any other keyword arguments are saved, and passed to
- the distribution's 'include()' and 'exclude()' methods when the
- feature is included or excluded, respectively. So, for example, you
- could pass 'packages=["a","b"]' to cause packages 'a' and 'b' to be
- added or removed from the distribution as appropriate.
-
-    A feature must include at least one 'require_features', 'remove', or other
- keyword argument. Otherwise, it can't affect the distribution in any way.
- Note also that you can subclass 'Feature' to create your own specialized
- feature types that modify the distribution in other ways when included or
- excluded. See the docstrings for the various methods here for more detail.
- Aside from the methods, the only feature attributes that distributions look
- at are 'description' and 'optional'.
- """
- def __init__(self, description, standard=False, available=True,
- optional=True, require_features=(), remove=(), **extras
- ):
-
- self.description = description
- self.standard = standard
- self.available = available
- self.optional = optional
- if isinstance(require_features,(str,Require)):
- require_features = require_features,
-
- self.require_features = [
- r for r in require_features if isinstance(r,str)
- ]
- er = [r for r in require_features if not isinstance(r,str)]
- if er: extras['require_features'] = er
-
- if isinstance(remove,str):
- remove = remove,
- self.remove = remove
- self.extras = extras
-
- if not remove and not require_features and not extras:
- raise DistutilsSetupError(
- "Feature %s: must define 'require_features', 'remove', or at least one"
-                " of 'packages', 'py_modules', etc." % self.description
- )
-
- def include_by_default(self):
- """Should this feature be included by default?"""
- return self.available and self.standard
-
- def include_in(self,dist):
-
- """Ensure feature and its requirements are included in distribution
-
- You may override this in a subclass to perform additional operations on
- the distribution. Note that this method may be called more than once
- per feature, and so should be idempotent.
-
- """
-
- if not self.available:
- raise DistutilsPlatformError(
-                self.description+" is required, "
- "but is not available on this platform"
- )
-
- dist.include(**self.extras)
-
- for f in self.require_features:
- dist.include_feature(f)
-
-
-
- def exclude_from(self,dist):
-
- """Ensure feature is excluded from distribution
-
- You may override this in a subclass to perform additional operations on
- the distribution. This method will be called at most once per
- feature, and only after all included features have been asked to
- include themselves.
- """
-
- dist.exclude(**self.extras)
-
- if self.remove:
- for item in self.remove:
- dist.exclude_package(item)
-
-
-
- def validate(self,dist):
-
- """Verify that feature makes sense in context of distribution
-
- This method is called by the distribution just before it parses its
- command line. It checks to ensure that the 'remove' attribute, if any,
- contains only valid package/module names that are present in the base
- distribution when 'setup()' is called. You may override it in a
- subclass to perform any other required validation of the feature
- against a target distribution.
- """
-
- for item in self.remove:
- if not dist.has_contents_for(item):
- raise DistutilsSetupError(
- "%s wants to be able to remove %s, but the distribution"
- " doesn't contain any packages or modules under %s"
- % (self.description, item, item)
- )
-
-
-
-def check_packages(dist, attr, value):
- for pkgname in value:
- if not re.match(r'\w+(\.\w+)*', pkgname):
- distutils.log.warn(
-                "WARNING: %r not a valid package name; please use only "
- ".-separated package names in setup.py", pkgname
- )
-
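All of the check_* validators above share one calling convention: dist.py looks them up through the 'distutils.setup_keywords' entry-point group and calls each as (dist, attr, value), expecting a DistutilsSetupError on bad input. A hedged sketch of a third-party validator in that style (the keyword and entry point here are hypothetical):

from distutils.errors import DistutilsSetupError

def check_homepage(dist, attr, value):
    """Validator with the (dist, attr, value) signature dist.py expects."""
    if not isinstance(value, str) or not value.startswith('http'):
        raise DistutilsSetupError(
            "%r must be an http(s) URL (got %r)" % (attr, value)
        )

# The providing package would register it via entry-point metadata, e.g.:
#   [distutils.setup_keywords]
#   homepage = mypkg.checks:check_homepage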
diff --git a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/extension.py b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/extension.py
deleted file mode 100755
index d186c7a2..00000000
--- a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/extension.py
+++ /dev/null
@@ -1,36 +0,0 @@
-from distutils.core import Extension as _Extension
-from setuptools.dist import _get_unpatched
-_Extension = _get_unpatched(_Extension)
-
-try:
- from Pyrex.Distutils.build_ext import build_ext
-except ImportError:
- have_pyrex = False
-else:
- have_pyrex = True
-
-
-class Extension(_Extension):
- """Extension that uses '.c' files in place of '.pyx' files"""
-
- if not have_pyrex:
- # convert .pyx extensions to .c
- def __init__(self,*args,**kw):
- _Extension.__init__(self,*args,**kw)
- sources = []
- for s in self.sources:
- if s.endswith('.pyx'):
- sources.append(s[:-3]+'c')
- else:
- sources.append(s)
- self.sources = sources
-
-class Library(Extension):
- """Just like a regular Extension, but built as a library instead"""
-
-import sys, distutils.core, distutils.extension
-distutils.core.Extension = Extension
-distutils.extension.Extension = Extension
-if 'distutils.command.build_ext' in sys.modules:
- sys.modules['distutils.command.build_ext'].Extension = Extension
-
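The point of the patched Extension above: when Pyrex is absent, each '.pyx' source is silently swapped for its pre-generated '.c' twin, so source distributions compile without the translator installed. A usage sketch, assuming Pyrex is not installed (the module names are made up):

from setuptools.extension import Extension

ext = Extension('fastmod', sources=['fastmod.pyx', 'helper.c'])
print(ext.sources)    # ['fastmod.c', 'helper.c']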
diff --git a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/gui.exe b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/gui.exe
deleted file mode 100644
index 474838d5..00000000
--- a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/gui.exe
+++ /dev/null
Binary files differ
diff --git a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/package_index.py b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/package_index.py
deleted file mode 100755
index 1d467f78..00000000
--- a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/package_index.py
+++ /dev/null
@@ -1,830 +0,0 @@
-"""PyPI and direct package downloading"""
-import sys, os.path, re, urlparse, urllib2, shutil, random, socket, cStringIO
-import httplib
-from pkg_resources import *
-from distutils import log
-from distutils.errors import DistutilsError
-try:
- from hashlib import md5
-except ImportError:
- from md5 import md5
-from fnmatch import translate
-
-EGG_FRAGMENT = re.compile(r'^egg=([-A-Za-z0-9_.]+)$')
-HREF = re.compile("""href\\s*=\\s*['"]?([^'"> ]+)""", re.I)
-# this is here to fix emacs' cruddy broken syntax highlighting
-PYPI_MD5 = re.compile(
- '<a href="([^"#]+)">([^<]+)</a>\n\s+\\(<a (?:title="MD5 hash"\n\s+)'
- 'href="[^?]+\?:action=show_md5&amp;digest=([0-9a-f]{32})">md5</a>\\)'
-)
-URL_SCHEME = re.compile('([-+.a-z0-9]{2,}):',re.I).match
-EXTENSIONS = ".tar.gz .tar.bz2 .tar .zip .tgz".split()
-
-__all__ = [
- 'PackageIndex', 'distros_for_url', 'parse_bdist_wininst',
- 'interpret_distro_name',
-]
-
-_SOCKET_TIMEOUT = 15
-
-def parse_bdist_wininst(name):
- """Return (base,pyversion) or (None,None) for possible .exe name"""
-
- lower = name.lower()
- base, py_ver = None, None
-
- if lower.endswith('.exe'):
- if lower.endswith('.win32.exe'):
- base = name[:-10]
- elif lower.startswith('.win32-py',-16):
- py_ver = name[-7:-4]
- base = name[:-16]
-
- return base,py_ver
-
-def egg_info_for_url(url):
- scheme, server, path, parameters, query, fragment = urlparse.urlparse(url)
- base = urllib2.unquote(path.split('/')[-1])
- if '#' in base: base, fragment = base.split('#',1)
- return base,fragment
-
-def distros_for_url(url, metadata=None):
- """Yield egg or source distribution objects that might be found at a URL"""
- base, fragment = egg_info_for_url(url)
- for dist in distros_for_location(url, base, metadata): yield dist
- if fragment:
- match = EGG_FRAGMENT.match(fragment)
- if match:
- for dist in interpret_distro_name(
- url, match.group(1), metadata, precedence = CHECKOUT_DIST
- ):
- yield dist
-
-def distros_for_location(location, basename, metadata=None):
- """Yield egg or source distribution objects based on basename"""
- if basename.endswith('.egg.zip'):
- basename = basename[:-4] # strip the .zip
- if basename.endswith('.egg') and '-' in basename:
- # only one, unambiguous interpretation
- return [Distribution.from_location(location, basename, metadata)]
-
- if basename.endswith('.exe'):
- win_base, py_ver = parse_bdist_wininst(basename)
- if win_base is not None:
- return interpret_distro_name(
- location, win_base, metadata, py_ver, BINARY_DIST, "win32"
- )
-
- # Try source distro extensions (.zip, .tgz, etc.)
- #
- for ext in EXTENSIONS:
- if basename.endswith(ext):
- basename = basename[:-len(ext)]
- return interpret_distro_name(location, basename, metadata)
- return [] # no extension matched
-
-def distros_for_filename(filename, metadata=None):
- """Yield possible egg or source distribution objects based on a filename"""
- return distros_for_location(
- normalize_path(filename), os.path.basename(filename), metadata
- )
-
-
-def interpret_distro_name(location, basename, metadata,
- py_version=None, precedence=SOURCE_DIST, platform=None
-):
- """Generate alternative interpretations of a source distro name
-
- Note: if `location` is a filesystem filename, you should call
- ``pkg_resources.normalize_path()`` on it before passing it to this
- routine!
- """
- # Generate alternative interpretations of a source distro name
- # Because some packages are ambiguous as to name/versions split
- # e.g. "adns-python-1.1.0", "egenix-mx-commercial", etc.
-    # So, we generate each possible interpretation (e.g. "adns, python-1.1.0"
- # "adns-python, 1.1.0", and "adns-python-1.1.0, no version"). In practice,
- # the spurious interpretations should be ignored, because in the event
- # there's also an "adns" package, the spurious "python-1.1.0" version will
- # compare lower than any numeric version number, and is therefore unlikely
- # to match a request for it. It's still a potential problem, though, and
- # in the long run PyPI and the distutils should go for "safe" names and
- # versions in distribution archive names (sdist and bdist).
-
- parts = basename.split('-')
- if not py_version:
- for i,p in enumerate(parts[2:]):
- if len(p)==5 and p.startswith('py2.'):
- return # It's a bdist_dumb, not an sdist -- bail out
-
- for p in range(1,len(parts)+1):
- yield Distribution(
- location, metadata, '-'.join(parts[:p]), '-'.join(parts[p:]),
- py_version=py_version, precedence = precedence,
- platform = platform
- )
-
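The comment block above is the heart of interpret_distro_name(): every dash position is tried as the name/version split, and spurious candidates are expected to lose out later. A quick illustration of the candidates generated for one ambiguous basename:

# The splits interpret_distro_name() yields for 'adns-python-1.1.0':
basename = 'adns-python-1.1.0'
parts = basename.split('-')
for p in range(1, len(parts) + 1):
    print('-'.join(parts[:p]), '|', '-'.join(parts[p:]))
# adns | python-1.1.0
# adns-python | 1.1.0
# adns-python-1.1.0 |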
-REL = re.compile("""<([^>]*\srel\s*=\s*['"]?([^'">]+)[^>]*)>""", re.I)
-# this line is here to fix emacs' cruddy broken syntax highlighting
-
-def find_external_links(url, page):
- """Find rel="homepage" and rel="download" links in `page`, yielding URLs"""
-
- for match in REL.finditer(page):
- tag, rel = match.groups()
- rels = map(str.strip, rel.lower().split(','))
- if 'homepage' in rels or 'download' in rels:
- for match in HREF.finditer(tag):
- yield urlparse.urljoin(url, htmldecode(match.group(1)))
-
- for tag in ("<th>Home Page", "<th>Download URL"):
- pos = page.find(tag)
- if pos!=-1:
- match = HREF.search(page,pos)
- if match:
- yield urlparse.urljoin(url, htmldecode(match.group(1)))
-
-user_agent = "Python-urllib/%s distribute/%s" % (
- sys.version[:3], require('distribute')[0].version
-)
-
-
-class PackageIndex(Environment):
- """A distribution index that scans web pages for download URLs"""
-
- def __init__(self, index_url="http://pypi.python.org/simple", hosts=('*',),
- *args, **kw
- ):
- Environment.__init__(self,*args,**kw)
- self.index_url = index_url + "/"[:not index_url.endswith('/')]
- self.scanned_urls = {}
- self.fetched_urls = {}
- self.package_pages = {}
- self.allows = re.compile('|'.join(map(translate,hosts))).match
- self.to_scan = []
-
-
-
- def process_url(self, url, retrieve=False):
- """Evaluate a URL as a possible download, and maybe retrieve it"""
- if url in self.scanned_urls and not retrieve:
- return
- self.scanned_urls[url] = True
- if not URL_SCHEME(url):
- self.process_filename(url)
- return
- else:
- dists = list(distros_for_url(url))
- if dists:
- if not self.url_ok(url):
- return
- self.debug("Found link: %s", url)
-
- if dists or not retrieve or url in self.fetched_urls:
- map(self.add, dists)
- return # don't need the actual page
-
- if not self.url_ok(url):
- self.fetched_urls[url] = True
- return
-
- self.info("Reading %s", url)
- f = self.open_url(url, "Download error: %s -- Some packages may not be found!")
- if f is None: return
- self.fetched_urls[url] = self.fetched_urls[f.url] = True
-
- if 'html' not in f.headers.get('content-type', '').lower():
- f.close() # not html, we can't process it
- return
-
- base = f.url # handle redirects
- page = f.read()
- if sys.version_info >= (3,):
- charset = f.headers.get_param('charset') or 'latin-1'
- page = page.decode(charset, "ignore")
- f.close()
- for match in HREF.finditer(page):
- link = urlparse.urljoin(base, htmldecode(match.group(1)))
- self.process_url(link)
- if url.startswith(self.index_url) and getattr(f,'code',None)!=404:
- page = self.process_index(url, page)
-
- def process_filename(self, fn, nested=False):
- # process filenames or directories
- if not os.path.exists(fn):
- self.warn("Not found: %s", fn)
- return
-
- if os.path.isdir(fn) and not nested:
- path = os.path.realpath(fn)
- for item in os.listdir(path):
- self.process_filename(os.path.join(path,item), True)
-
- dists = distros_for_filename(fn)
- if dists:
- self.debug("Found: %s", fn)
- map(self.add, dists)
-
- def url_ok(self, url, fatal=False):
- s = URL_SCHEME(url)
- if (s and s.group(1).lower()=='file') or self.allows(urlparse.urlparse(url)[1]):
- return True
-        msg = "\nLink to %s ***BLOCKED*** by --allow-hosts\n"
- if fatal:
- raise DistutilsError(msg % url)
- else:
- self.warn(msg, url)
-
- def scan_egg_links(self, search_path):
- for item in search_path:
- if os.path.isdir(item):
- for entry in os.listdir(item):
- if entry.endswith('.egg-link'):
- self.scan_egg_link(item, entry)
-
- def scan_egg_link(self, path, entry):
- lines = filter(None, map(str.strip, open(os.path.join(path, entry))))
- if len(lines)==2:
- for dist in find_distributions(os.path.join(path, lines[0])):
- dist.location = os.path.join(path, *lines)
- dist.precedence = SOURCE_DIST
- self.add(dist)
-
- def process_index(self,url,page):
- """Process the contents of a PyPI page"""
- def scan(link):
- # Process a URL to see if it's for a package page
- if link.startswith(self.index_url):
- parts = map(
- urllib2.unquote, link[len(self.index_url):].split('/')
- )
- if len(parts)==2 and '#' not in parts[1]:
- # it's a package page, sanitize and index it
- pkg = safe_name(parts[0])
- ver = safe_version(parts[1])
- self.package_pages.setdefault(pkg.lower(),{})[link] = True
- return to_filename(pkg), to_filename(ver)
- return None, None
-
- # process an index page into the package-page index
- for match in HREF.finditer(page):
- try:
- scan( urlparse.urljoin(url, htmldecode(match.group(1))) )
- except ValueError:
- pass
-
- pkg, ver = scan(url) # ensure this page is in the page index
- if pkg:
- # process individual package page
- for new_url in find_external_links(url, page):
- # Process the found URL
- base, frag = egg_info_for_url(new_url)
- if base.endswith('.py') and not frag:
- if ver:
- new_url+='#egg=%s-%s' % (pkg,ver)
- else:
- self.need_version_info(url)
- self.scan_url(new_url)
-
- return PYPI_MD5.sub(
- lambda m: '<a href="%s#md5=%s">%s</a>' % m.group(1,3,2), page
- )
- else:
- return "" # no sense double-scanning non-package pages
-
-
-
- def need_version_info(self, url):
- self.scan_all(
- "Page at %s links to .py file(s) without version info; an index "
- "scan is required.", url
- )
-
- def scan_all(self, msg=None, *args):
- if self.index_url not in self.fetched_urls:
- if msg: self.warn(msg,*args)
- self.info(
- "Scanning index of all packages (this may take a while)"
- )
- self.scan_url(self.index_url)
-
- def find_packages(self, requirement):
- self.scan_url(self.index_url + requirement.unsafe_name+'/')
-
- if not self.package_pages.get(requirement.key):
- # Fall back to safe version of the name
- self.scan_url(self.index_url + requirement.project_name+'/')
-
- if not self.package_pages.get(requirement.key):
- # We couldn't find the target package, so search the index page too
- self.not_found_in_index(requirement)
-
- for url in list(self.package_pages.get(requirement.key,())):
- # scan each page that might be related to the desired package
- self.scan_url(url)
-
- def obtain(self, requirement, installer=None):
- self.prescan(); self.find_packages(requirement)
- for dist in self[requirement.key]:
- if dist in requirement:
- return dist
- self.debug("%s does not match %s", requirement, dist)
- return super(PackageIndex, self).obtain(requirement,installer)
-
-
-
-
-
- def check_md5(self, cs, info, filename, tfp):
- if re.match('md5=[0-9a-f]{32}$', info):
- self.debug("Validating md5 checksum for %s", filename)
- if cs.hexdigest()<>info[4:]:
- tfp.close()
- os.unlink(filename)
- raise DistutilsError(
- "MD5 validation failed for "+os.path.basename(filename)+
- "; possible download problem?"
- )
-
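check_md5() above validates a finished download against the '#md5=<hex>' fragment carried on the link. A self-contained sketch of the same comparison (the error type and inputs are illustrative):

import hashlib
import re

def verify_md5(data, info):
    """Raise if 'info' is an md5= fragment that doesn't match 'data'."""
    if re.match('md5=[0-9a-f]{32}$', info):
        if hashlib.md5(data).hexdigest() != info[4:]:
            raise ValueError('MD5 validation failed; possible download problem?')

verify_md5(b'payload', 'md5=' + hashlib.md5(b'payload').hexdigest())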
- def add_find_links(self, urls):
- """Add `urls` to the list that will be prescanned for searches"""
- for url in urls:
- if (
- self.to_scan is None # if we have already "gone online"
- or not URL_SCHEME(url) # or it's a local file/directory
- or url.startswith('file:')
- or list(distros_for_url(url)) # or a direct package link
- ):
- # then go ahead and process it now
- self.scan_url(url)
- else:
- # otherwise, defer retrieval till later
- self.to_scan.append(url)
-
- def prescan(self):
- """Scan urls scheduled for prescanning (e.g. --find-links)"""
- if self.to_scan:
- map(self.scan_url, self.to_scan)
- self.to_scan = None # from now on, go ahead and process immediately
-
- def not_found_in_index(self, requirement):
- if self[requirement.key]: # we've seen at least one distro
- meth, msg = self.info, "Couldn't retrieve index page for %r"
- else: # no distros seen for this name, might be misspelled
- meth, msg = (self.warn,
- "Couldn't find index page for %r (maybe misspelled?)")
- meth(msg, requirement.unsafe_name)
- self.scan_all()
-
- def download(self, spec, tmpdir):
- """Locate and/or download `spec` to `tmpdir`, returning a local path
-
- `spec` may be a ``Requirement`` object, or a string containing a URL,
- an existing local filename, or a project/version requirement spec
- (i.e. the string form of a ``Requirement`` object). If it is the URL
- of a .py file with an unambiguous ``#egg=name-version`` tag (i.e., one
- that escapes ``-`` as ``_`` throughout), a trivial ``setup.py`` is
- automatically created alongside the downloaded file.
-
- If `spec` is a ``Requirement`` object or a string containing a
- project/version requirement spec, this method returns the location of
- a matching distribution (possibly after downloading it to `tmpdir`).
- If `spec` is a locally existing file or directory name, it is simply
- returned unchanged. If `spec` is a URL, it is downloaded to a subpath
- of `tmpdir`, and the local filename is returned. Various errors may be
- raised if a problem occurs during downloading.
- """
- if not isinstance(spec,Requirement):
- scheme = URL_SCHEME(spec)
- if scheme:
- # It's a url, download it to tmpdir
- found = self._download_url(scheme.group(1), spec, tmpdir)
- base, fragment = egg_info_for_url(spec)
- if base.endswith('.py'):
- found = self.gen_setup(found,fragment,tmpdir)
- return found
- elif os.path.exists(spec):
- # Existing file or directory, just return it
- return spec
- else:
- try:
- spec = Requirement.parse(spec)
- except ValueError:
- raise DistutilsError(
- "Not a URL, existing file, or requirement spec: %r" %
- (spec,)
- )
- return getattr(self.fetch_distribution(spec, tmpdir),'location',None)
-
-
- def fetch_distribution(self,
- requirement, tmpdir, force_scan=False, source=False, develop_ok=False,
- local_index=None
- ):
- """Obtain a distribution suitable for fulfilling `requirement`
-
- `requirement` must be a ``pkg_resources.Requirement`` instance.
- If necessary, or if the `force_scan` flag is set, the requirement is
- searched for in the (online) package index as well as the locally
- installed packages. If a distribution matching `requirement` is found,
- the returned distribution's ``location`` is the value you would have
- gotten from calling the ``download()`` method with the matching
- distribution's URL or filename. If no matching distribution is found,
- ``None`` is returned.
-
- If the `source` flag is set, only source distributions and source
- checkout links will be considered. Unless the `develop_ok` flag is
- set, development and system eggs (i.e., those using the ``.egg-info``
- format) will be ignored.
- """
-
- # process a Requirement
- self.info("Searching for %s", requirement)
- skipped = {}
- dist = None
-
- def find(req, env=None):
- if env is None:
- env = self
- # Find a matching distribution; may be called more than once
-
- for dist in env[req.key]:
-
- if dist.precedence==DEVELOP_DIST and not develop_ok:
- if dist not in skipped:
- self.warn("Skipping development or system egg: %s",dist)
- skipped[dist] = 1
- continue
-
- if dist in req and (dist.precedence<=SOURCE_DIST or not source):
- self.info("Best match: %s", dist)
- return dist.clone(
- location=self.download(dist.location, tmpdir)
- )
-
- if force_scan:
- self.prescan()
- self.find_packages(requirement)
- dist = find(requirement)
-
- if local_index is not None:
- dist = dist or find(requirement, local_index)
-
- if dist is None and self.to_scan is not None:
- self.prescan()
- dist = find(requirement)
-
- if dist is None and not force_scan:
- self.find_packages(requirement)
- dist = find(requirement)
-
- if dist is None:
- self.warn(
- "No local packages or download links found for %s%s",
- (source and "a source distribution of " or ""),
- requirement,
- )
- return dist
-
- def fetch(self, requirement, tmpdir, force_scan=False, source=False):
- """Obtain a file suitable for fulfilling `requirement`
-
- DEPRECATED; use the ``fetch_distribution()`` method now instead. For
- backward compatibility, this routine is identical but returns the
- ``location`` of the downloaded distribution instead of a distribution
- object.
- """
- dist = self.fetch_distribution(requirement,tmpdir,force_scan,source)
- if dist is not None:
- return dist.location
- return None
-
-
-
-
-
-
-
-
- def gen_setup(self, filename, fragment, tmpdir):
- match = EGG_FRAGMENT.match(fragment)
- dists = match and [d for d in
- interpret_distro_name(filename, match.group(1), None) if d.version
- ] or []
-
- if len(dists)==1: # unambiguous ``#egg`` fragment
- basename = os.path.basename(filename)
-
- # Make sure the file has been downloaded to the temp dir.
- if os.path.dirname(filename) != tmpdir:
- dst = os.path.join(tmpdir, basename)
- from setuptools.command.easy_install import samefile
- if not samefile(filename, dst):
- shutil.copy2(filename, dst)
- filename=dst
-
- file = open(os.path.join(tmpdir, 'setup.py'), 'w')
- file.write(
- "from setuptools import setup\n"
- "setup(name=%r, version=%r, py_modules=[%r])\n"
- % (
- dists[0].project_name, dists[0].version,
- os.path.splitext(basename)[0]
- )
- )
- file.close()
- return filename
-
- elif match:
- raise DistutilsError(
- "Can't unambiguously interpret project/version identifier %r; "
- "any dashes in the name or version should be escaped using "
- "underscores. %r" % (fragment,dists)
- )
- else:
- raise DistutilsError(
- "Can't process plain .py files without an '#egg=name-version'"
- " suffix to enable automatic setup script generation."
- )
-
- dl_blocksize = 8192
- def _download_to(self, url, filename):
- self.info("Downloading %s", url)
- # Download the file
- fp, tfp, info = None, None, None
- try:
- if '#' in url:
- url, info = url.split('#', 1)
- fp = self.open_url(url)
- if isinstance(fp, urllib2.HTTPError):
- raise DistutilsError(
- "Can't download %s: %s %s" % (url, fp.code,fp.msg)
- )
- cs = md5()
- headers = fp.info()
- blocknum = 0
- bs = self.dl_blocksize
- size = -1
- if "content-length" in headers:
- size = int(headers["Content-Length"])
- self.reporthook(url, filename, blocknum, bs, size)
- tfp = open(filename,'wb')
- while True:
- block = fp.read(bs)
- if block:
- cs.update(block)
- tfp.write(block)
- blocknum += 1
- self.reporthook(url, filename, blocknum, bs, size)
- else:
- break
- if info: self.check_md5(cs, info, filename, tfp)
- return headers
- finally:
- if fp: fp.close()
- if tfp: tfp.close()
-
- def reporthook(self, url, filename, blocknum, blksize, size):
- pass # no-op
-
-
- def open_url(self, url, warning=None):
- if url.startswith('file:'):
- return local_open(url)
- try:
- return open_with_auth(url)
- except (ValueError, httplib.InvalidURL), v:
- msg = ' '.join([str(arg) for arg in v.args])
- if warning:
- self.warn(warning, msg)
- else:
- raise DistutilsError('%s %s' % (url, msg))
- except urllib2.HTTPError, v:
- return v
- except urllib2.URLError, v:
- if warning:
- self.warn(warning, v.reason)
- else:
- raise DistutilsError("Download error for %s: %s"
- % (url, v.reason))
- except httplib.BadStatusLine, v:
- if warning:
- self.warn(warning, v.line)
- else:
- raise DistutilsError('%s returned a bad status line. '
- 'The server might be down, %s' % \
- (url, v.line))
- except httplib.HTTPException, v:
- if warning:
- self.warn(warning, v)
- else:
- raise DistutilsError("Download error for %s: %s"
- % (url, v))
-
- def _download_url(self, scheme, url, tmpdir):
- # Determine download filename
- #
- name = filter(None,urlparse.urlparse(url)[2].split('/'))
- if name:
- name = name[-1]
- while '..' in name:
- name = name.replace('..','.').replace('\\','_')
- else:
- name = "__downloaded__" # default if URL has no path contents
-
- if name.endswith('.egg.zip'):
- name = name[:-4] # strip the extra .zip before download
-
- filename = os.path.join(tmpdir,name)
-
- # Download the file
- #
- if scheme=='svn' or scheme.startswith('svn+'):
- return self._download_svn(url, filename)
- elif scheme=='file':
- return urllib2.url2pathname(urlparse.urlparse(url)[2])
- else:
- self.url_ok(url, True) # raises error if not allowed
- return self._attempt_download(url, filename)
-
-
-
- def scan_url(self, url):
- self.process_url(url, True)
-
-
- def _attempt_download(self, url, filename):
- headers = self._download_to(url, filename)
- if 'html' in headers.get('content-type','').lower():
- return self._download_html(url, headers, filename)
- else:
- return filename
-
- def _download_html(self, url, headers, filename):
- file = open(filename)
- for line in file:
- if line.strip():
- # Check for a subversion index page
- if re.search(r'<title>([^- ]+ - )?Revision \d+:', line):
- # it's a subversion index page:
- file.close()
- os.unlink(filename)
- return self._download_svn(url, filename)
- break # not an index page
- file.close()
- os.unlink(filename)
- raise DistutilsError("Unexpected HTML page found at "+url)
-
- def _download_svn(self, url, filename):
- url = url.split('#',1)[0] # remove any fragment for svn's sake
- self.info("Doing subversion checkout from %s to %s", url, filename)
- os.system("svn checkout -q %s %s" % (url, filename))
- return filename
-
- def debug(self, msg, *args):
- log.debug(msg, *args)
-
- def info(self, msg, *args):
- log.info(msg, *args)
-
- def warn(self, msg, *args):
- log.warn(msg, *args)
-
-# This pattern matches a character entity reference (a decimal numeric
-# reference, a hexadecimal numeric reference, or a named reference).
-entity_sub = re.compile(r'&(#(\d+|x[\da-fA-F]+)|[\w.:-]+);?').sub
-
-def uchr(c):
- if not isinstance(c, int):
- return c
- if c>255: return unichr(c)
- return chr(c)
-
-def decode_entity(match):
- what = match.group(1)
- if what.startswith('#x'):
- what = int(what[2:], 16)
- elif what.startswith('#'):
- what = int(what[1:])
- else:
- from htmlentitydefs import name2codepoint
- what = name2codepoint.get(what, match.group(0))
- return uchr(what)
-
-def htmldecode(text):
- """Decode HTML entities in the given text."""
- return entity_sub(decode_entity, text)
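-
-# A minimal sketch of htmldecode: named, decimal, and hexadecimal
-# entity references are all handled by the pattern above.
-#
-#     htmldecode('fish &amp; chips')   # -> 'fish & chips'
-#     htmldecode('&#65;&#x42;')        # -> 'AB'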
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-def socket_timeout(timeout=15):
- def _socket_timeout(func):
- def _socket_timeout(*args, **kwargs):
- old_timeout = socket.getdefaulttimeout()
- socket.setdefaulttimeout(timeout)
- try:
- return func(*args, **kwargs)
- finally:
- socket.setdefaulttimeout(old_timeout)
- return _socket_timeout
- return _socket_timeout
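-
-# Usage sketch: a callable wrapped this way runs with a temporary
-# default socket timeout, which is restored afterwards (the wrapped
-# function below is hypothetical).
-#
-#     @socket_timeout(5)
-#     def fetch(url):
-#         return urllib2.urlopen(url).read()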
-
-
-def open_with_auth(url):
- """Open a urllib2 request, handling HTTP authentication"""
-
- scheme, netloc, path, params, query, frag = urlparse.urlparse(url)
-
- if scheme in ('http', 'https'):
- auth, host = urllib2.splituser(netloc)
- else:
- auth = None
-
- if auth:
- auth = "Basic " + urllib2.unquote(auth).encode('base64').strip()
- new_url = urlparse.urlunparse((scheme,host,path,params,query,frag))
- request = urllib2.Request(new_url)
- request.add_header("Authorization", auth)
- else:
- request = urllib2.Request(url)
-
- request.add_header('User-Agent', user_agent)
- fp = urllib2.urlopen(request)
-
- if auth:
- # Put authentication info back into request URL if same host,
- # so that links found on the page will work
- s2, h2, path2, param2, query2, frag2 = urlparse.urlparse(fp.url)
- if s2==scheme and h2==host:
- fp.url = urlparse.urlunparse((s2,netloc,path2,param2,query2,frag2))
-
- return fp
-
-# adding a timeout to avoid freezing package_index
-open_with_auth = socket_timeout(_SOCKET_TIMEOUT)(open_with_auth)
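-
-# Usage sketch (hypothetical URL): credentials embedded in the URL are
-# sent as a Basic Authorization header, and the response URL keeps the
-# credentials so links found on the page continue to work.
-#
-#     fp = open_with_auth('http://user:secret@example.com/simple/')
-#     page = fp.read()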
-
-
-
-
-
-
-
-
-
-
-
-def fix_sf_url(url):
- return url # backward compatibility
-
-def local_open(url):
- """Read a local path, with special support for directories"""
- scheme, server, path, param, query, frag = urlparse.urlparse(url)
- filename = urllib2.url2pathname(path)
- if os.path.isfile(filename):
- return urllib2.urlopen(url)
- elif path.endswith('/') and os.path.isdir(filename):
- files = []
- for f in os.listdir(filename):
- if f=='index.html':
- fp = open(os.path.join(filename,f),'rb')
- body = fp.read()
- fp.close()
- break
- elif os.path.isdir(os.path.join(filename,f)):
- f+='/'
- files.append("<a href=%r>%s</a>" % (f,f))
- else:
- body = ("<html><head><title>%s</title>" % url) + \
- "</head><body>%s</body></html>" % '\n'.join(files)
- status, message = 200, "OK"
- else:
- status, message, body = 404, "Path not found", "Not found"
-
- return urllib2.HTTPError(url, status, message,
- {'content-type':'text/html'}, cStringIO.StringIO(body))
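-
-# Usage sketch (hypothetical path): a directory URL without an
-# index.html is answered with a synthesized listing, wrapped in an
-# HTTPError so callers can treat it like any other response object.
-#
-#     fp = local_open('file:///tmp/eggs/')
-#     listing = fp.read()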
-
-
-
-
-
-
-
-
-
-
-
-
-
-# this line is a kludge to keep the trailing blank lines for pje's editor
diff --git a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/sandbox.py b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/sandbox.py
deleted file mode 100755
index a06d4483..00000000
--- a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/sandbox.py
+++ /dev/null
@@ -1,282 +0,0 @@
-import os, sys, __builtin__, tempfile, operator
-_os = sys.modules[os.name]
-try:
- _file = file
-except NameError:
- _file = None
-_open = open
-from distutils.errors import DistutilsError
-__all__ = [
- "AbstractSandbox", "DirectorySandbox", "SandboxViolation", "run_setup",
-]
-def run_setup(setup_script, args):
- """Run a distutils setup script, sandboxed in its directory"""
- old_dir = os.getcwd()
- save_argv = sys.argv[:]
- save_path = sys.path[:]
- setup_dir = os.path.abspath(os.path.dirname(setup_script))
- temp_dir = os.path.join(setup_dir,'temp')
- if not os.path.isdir(temp_dir): os.makedirs(temp_dir)
- save_tmp = tempfile.tempdir
- save_modules = sys.modules.copy()
- try:
- tempfile.tempdir = temp_dir
- os.chdir(setup_dir)
- try:
- sys.argv[:] = [setup_script]+list(args)
- sys.path.insert(0, setup_dir)
- DirectorySandbox(setup_dir).run(
- lambda: execfile(
- "setup.py",
- {'__file__':setup_script, '__name__':'__main__'}
- )
- )
- except SystemExit, v:
- if v.args and v.args[0]:
- raise
- # Normal exit, just return
- finally:
- sys.modules.update(save_modules)
- for key in list(sys.modules):
- if key not in save_modules: del sys.modules[key]
- os.chdir(old_dir)
- sys.path[:] = save_path
- sys.argv[:] = save_argv
- tempfile.tempdir = save_tmp
-
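-# Usage sketch (hypothetical script): run a setup script sandboxed in
-# its own directory, the way easy_install invokes it.
-#
-#     run_setup('/tmp/Foo-1.0/setup.py', ['--quiet', 'bdist_egg'])
-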
-class AbstractSandbox:
- """Wrap 'os' module and 'open()' builtin for virtualizing setup scripts"""
-
- _active = False
-
- def __init__(self):
- self._attrs = [
- name for name in dir(_os)
- if not name.startswith('_') and hasattr(self,name)
- ]
-
- def _copy(self, source):
- for name in self._attrs:
- setattr(os, name, getattr(source,name))
-
- def run(self, func):
- """Run 'func' under os sandboxing"""
- try:
- self._copy(self)
- if _file:
- __builtin__.file = self._file
- __builtin__.open = self._open
- self._active = True
- return func()
- finally:
- self._active = False
- if _file:
- __builtin__.file = _file
- __builtin__.open = _open
- self._copy(_os)
-
-
- def _mk_dual_path_wrapper(name):
- original = getattr(_os,name)
- def wrap(self,src,dst,*args,**kw):
- if self._active:
- src,dst = self._remap_pair(name,src,dst,*args,**kw)
- return original(src,dst,*args,**kw)
- return wrap
-
-
- for name in ["rename", "link", "symlink"]:
- if hasattr(_os,name): locals()[name] = _mk_dual_path_wrapper(name)
-
-
- def _mk_single_path_wrapper(name, original=None):
- original = original or getattr(_os,name)
- def wrap(self,path,*args,**kw):
- if self._active:
- path = self._remap_input(name,path,*args,**kw)
- return original(path,*args,**kw)
- return wrap
-
- if _file:
- _file = _mk_single_path_wrapper('file', _file)
- _open = _mk_single_path_wrapper('open', _open)
- for name in [
- "stat", "listdir", "chdir", "open", "chmod", "chown", "mkdir",
- "remove", "unlink", "rmdir", "utime", "lchown", "chroot", "lstat",
- "startfile", "mkfifo", "mknod", "pathconf", "access"
- ]:
- if hasattr(_os,name): locals()[name] = _mk_single_path_wrapper(name)
-
-
- def _mk_single_with_return(name):
- original = getattr(_os,name)
- def wrap(self,path,*args,**kw):
- if self._active:
- path = self._remap_input(name,path,*args,**kw)
- return self._remap_output(name, original(path,*args,**kw))
- return original(path,*args,**kw)
- return wrap
-
- for name in ['readlink', 'tempnam']:
- if hasattr(_os,name): locals()[name] = _mk_single_with_return(name)
-
- def _mk_query(name):
- original = getattr(_os,name)
- def wrap(self,*args,**kw):
- retval = original(*args,**kw)
- if self._active:
- return self._remap_output(name, retval)
- return retval
- return wrap
-
- for name in ['getcwd', 'tmpnam']:
- if hasattr(_os,name): locals()[name] = _mk_query(name)
-
- def _validate_path(self,path):
- """Called to remap or validate any path, whether input or output"""
- return path
-
- def _remap_input(self,operation,path,*args,**kw):
- """Called for path inputs"""
- return self._validate_path(path)
-
- def _remap_output(self,operation,path):
- """Called for path outputs"""
- return self._validate_path(path)
-
- def _remap_pair(self,operation,src,dst,*args,**kw):
- """Called for path pairs like rename, link, and symlink operations"""
- return (
- self._remap_input(operation+'-from',src,*args,**kw),
- self._remap_input(operation+'-to',dst,*args,**kw)
- )
-
-
-if hasattr(os, 'devnull'):
- _EXCEPTIONS = [os.devnull,]
-else:
- _EXCEPTIONS = []
-
-try:
- from win32com.client.gencache import GetGeneratePath
- _EXCEPTIONS.append(GetGeneratePath())
- del GetGeneratePath
-except ImportError:
- # it appears pywin32 is not installed, so no need to exclude.
- pass
-
-class DirectorySandbox(AbstractSandbox):
- """Restrict operations to a single subdirectory - pseudo-chroot"""
-
- write_ops = dict.fromkeys([
- "open", "chmod", "chown", "mkdir", "remove", "unlink", "rmdir",
- "utime", "lchown", "chroot", "mkfifo", "mknod", "tempnam",
- ])
-
- def __init__(self, sandbox, exceptions=_EXCEPTIONS):
- self._sandbox = os.path.normcase(os.path.realpath(sandbox))
- self._prefix = os.path.join(self._sandbox,'')
- self._exceptions = [os.path.normcase(os.path.realpath(path)) for path in exceptions]
- AbstractSandbox.__init__(self)
-
- def _violation(self, operation, *args, **kw):
- raise SandboxViolation(operation, args, kw)
-
- if _file:
- def _file(self, path, mode='r', *args, **kw):
- if mode not in ('r', 'rt', 'rb', 'rU', 'U') and not self._ok(path):
- self._violation("file", path, mode, *args, **kw)
- return _file(path,mode,*args,**kw)
-
- def _open(self, path, mode='r', *args, **kw):
- if mode not in ('r', 'rt', 'rb', 'rU', 'U') and not self._ok(path):
- self._violation("open", path, mode, *args, **kw)
- return _open(path,mode,*args,**kw)
-
- def tmpnam(self):
- self._violation("tmpnam")
-
- def _ok(self,path):
- active = self._active
- try:
- self._active = False
- realpath = os.path.normcase(os.path.realpath(path))
- if (self._exempted(realpath) or realpath == self._sandbox
- or realpath.startswith(self._prefix)):
- return True
- finally:
- self._active = active
-
- def _exempted(self, filepath):
- exception_matches = map(filepath.startswith, self._exceptions)
- return True in exception_matches
-
- def _remap_input(self,operation,path,*args,**kw):
- """Called for path inputs"""
- if operation in self.write_ops and not self._ok(path):
- self._violation(operation, os.path.realpath(path), *args, **kw)
- return path
-
- def _remap_pair(self,operation,src,dst,*args,**kw):
- """Called for path pairs like rename, link, and symlink operations"""
- if not self._ok(src) or not self._ok(dst):
- self._violation(operation, src, dst, *args, **kw)
- return (src,dst)
-
- def open(self, file, flags, mode=0777):
- """Called for low-level os.open()"""
- if flags & WRITE_FLAGS and not self._ok(file):
- self._violation("os.open", file, flags, mode)
- return _os.open(file,flags,mode)
-
-
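-# Usage sketch (hypothetical paths): writes outside the sandbox root
-# raise SandboxViolation, while reads are allowed anywhere.
-#
-#     sandbox = DirectorySandbox('/tmp/build')
-#     sandbox.run(lambda: open('/etc/passwd', 'w'))  # raises SandboxViolation
-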
-WRITE_FLAGS = reduce(
- operator.or_,
- [getattr(_os, a, 0) for a in
- "O_WRONLY O_RDWR O_APPEND O_CREAT O_TRUNC O_TEMPORARY".split()]
-)
-
-
-
-
-class SandboxViolation(DistutilsError):
- """A setup script attempted to modify the filesystem outside the sandbox"""
-
- def __str__(self):
- return """SandboxViolation: %s%r %s
-
-The package setup script has attempted to modify files on your system
-that are not within the EasyInstall build area, and has been aborted.
-
-This package cannot be safely installed by EasyInstall, and may not
-support alternate installation locations even if you run its setup
-script by hand. Please inform the package's author and the EasyInstall
-maintainers to find out if a fix or workaround is available.""" % self.args
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-#
diff --git a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/tests/__init__.py b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/tests/__init__.py
deleted file mode 100755
index 9af44a88..00000000
--- a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/tests/__init__.py
+++ /dev/null
@@ -1,370 +0,0 @@
-"""Tests for the 'setuptools' package"""
-from unittest import TestSuite, TestCase, makeSuite, defaultTestLoader
-import distutils.core, distutils.cmd
-from distutils.errors import DistutilsOptionError, DistutilsPlatformError
-from distutils.errors import DistutilsSetupError
-import setuptools, setuptools.dist
-from setuptools import Feature
-from distutils.core import Extension
-extract_constant, get_module_constant = None, None
-from setuptools.depends import *
-from distutils.version import StrictVersion, LooseVersion
-from distutils.util import convert_path
-import sys, os.path
-
-def additional_tests():
- import doctest, unittest
- suite = unittest.TestSuite((
- doctest.DocFileSuite(
- os.path.join('tests', 'api_tests.txt'),
- optionflags=doctest.ELLIPSIS, package='pkg_resources',
- ),
- ))
- if sys.platform == 'win32':
- suite.addTest(doctest.DocFileSuite('win_script_wrapper.txt'))
- return suite
-
-def makeSetup(**args):
- """Return distribution from 'setup(**args)', without executing commands"""
-
- distutils.core._setup_stop_after = "commandline"
-
- # Don't let system command line leak into tests!
- args.setdefault('script_args',['install'])
-
- try:
- return setuptools.setup(**args)
- finally:
-        distutils.core._setup_stop_after = None
-
-
-
-
-class DependsTests(TestCase):
-
- def testExtractConst(self):
- if not extract_constant: return # skip on non-bytecode platforms
-
- def f1():
- global x,y,z
- x = "test"
- y = z
-
- # unrecognized name
- self.assertEqual(extract_constant(f1.func_code,'q', -1), None)
-
- # constant assigned
- self.assertEqual(extract_constant(f1.func_code,'x', -1), "test")
-
- # expression assigned
- self.assertEqual(extract_constant(f1.func_code,'y', -1), -1)
-
- # recognized name, not assigned
- self.assertEqual(extract_constant(f1.func_code,'z', -1), None)
-
-
- def testFindModule(self):
- self.assertRaises(ImportError, find_module, 'no-such.-thing')
- self.assertRaises(ImportError, find_module, 'setuptools.non-existent')
- f,p,i = find_module('setuptools.tests'); f.close()
-
- def testModuleExtract(self):
- if not get_module_constant: return # skip on non-bytecode platforms
- from email import __version__
- self.assertEqual(
- get_module_constant('email','__version__'), __version__
- )
- self.assertEqual(
- get_module_constant('sys','version'), sys.version
- )
- self.assertEqual(
- get_module_constant('setuptools.tests','__doc__'),__doc__
- )
-
- def testRequire(self):
- if not extract_constant: return # skip on non-bytecode platforms
-
- req = Require('Email','1.0.3','email')
-
- self.assertEqual(req.name, 'Email')
- self.assertEqual(req.module, 'email')
- self.assertEqual(req.requested_version, '1.0.3')
- self.assertEqual(req.attribute, '__version__')
- self.assertEqual(req.full_name(), 'Email-1.0.3')
-
- from email import __version__
- self.assertEqual(req.get_version(), __version__)
- self.assert_(req.version_ok('1.0.9'))
- self.assert_(not req.version_ok('0.9.1'))
- self.assert_(not req.version_ok('unknown'))
-
- self.assert_(req.is_present())
- self.assert_(req.is_current())
-
- req = Require('Email 3000','03000','email',format=LooseVersion)
- self.assert_(req.is_present())
- self.assert_(not req.is_current())
- self.assert_(not req.version_ok('unknown'))
-
- req = Require('Do-what-I-mean','1.0','d-w-i-m')
- self.assert_(not req.is_present())
- self.assert_(not req.is_current())
-
- req = Require('Tests', None, 'tests', homepage="http://example.com")
- self.assertEqual(req.format, None)
- self.assertEqual(req.attribute, None)
- self.assertEqual(req.requested_version, None)
- self.assertEqual(req.full_name(), 'Tests')
- self.assertEqual(req.homepage, 'http://example.com')
-
- paths = [os.path.dirname(p) for p in __path__]
- self.assert_(req.is_present(paths))
- self.assert_(req.is_current(paths))
-
-
-class DistroTests(TestCase):
-
- def setUp(self):
- self.e1 = Extension('bar.ext',['bar.c'])
- self.e2 = Extension('c.y', ['y.c'])
-
- self.dist = makeSetup(
- packages=['a', 'a.b', 'a.b.c', 'b', 'c'],
- py_modules=['b.d','x'],
- ext_modules = (self.e1, self.e2),
- package_dir = {},
- )
-
-
- def testDistroType(self):
- self.assert_(isinstance(self.dist,setuptools.dist.Distribution))
-
-
- def testExcludePackage(self):
- self.dist.exclude_package('a')
- self.assertEqual(self.dist.packages, ['b','c'])
-
- self.dist.exclude_package('b')
- self.assertEqual(self.dist.packages, ['c'])
- self.assertEqual(self.dist.py_modules, ['x'])
- self.assertEqual(self.dist.ext_modules, [self.e1, self.e2])
-
- self.dist.exclude_package('c')
- self.assertEqual(self.dist.packages, [])
- self.assertEqual(self.dist.py_modules, ['x'])
- self.assertEqual(self.dist.ext_modules, [self.e1])
-
- # test removals from unspecified options
- makeSetup().exclude_package('x')
-
-
-
-
-
-
-
- def testIncludeExclude(self):
- # remove an extension
- self.dist.exclude(ext_modules=[self.e1])
- self.assertEqual(self.dist.ext_modules, [self.e2])
-
- # add it back in
- self.dist.include(ext_modules=[self.e1])
- self.assertEqual(self.dist.ext_modules, [self.e2, self.e1])
-
- # should not add duplicate
- self.dist.include(ext_modules=[self.e1])
- self.assertEqual(self.dist.ext_modules, [self.e2, self.e1])
-
- def testExcludePackages(self):
- self.dist.exclude(packages=['c','b','a'])
- self.assertEqual(self.dist.packages, [])
- self.assertEqual(self.dist.py_modules, ['x'])
- self.assertEqual(self.dist.ext_modules, [self.e1])
-
- def testEmpty(self):
- dist = makeSetup()
- dist.include(packages=['a'], py_modules=['b'], ext_modules=[self.e2])
- dist = makeSetup()
- dist.exclude(packages=['a'], py_modules=['b'], ext_modules=[self.e2])
-
- def testContents(self):
- self.assert_(self.dist.has_contents_for('a'))
- self.dist.exclude_package('a')
- self.assert_(not self.dist.has_contents_for('a'))
-
- self.assert_(self.dist.has_contents_for('b'))
- self.dist.exclude_package('b')
- self.assert_(not self.dist.has_contents_for('b'))
-
- self.assert_(self.dist.has_contents_for('c'))
- self.dist.exclude_package('c')
- self.assert_(not self.dist.has_contents_for('c'))
-
-
-
-
- def testInvalidIncludeExclude(self):
- self.assertRaises(DistutilsSetupError,
- self.dist.include, nonexistent_option='x'
- )
- self.assertRaises(DistutilsSetupError,
- self.dist.exclude, nonexistent_option='x'
- )
- self.assertRaises(DistutilsSetupError,
- self.dist.include, packages={'x':'y'}
- )
- self.assertRaises(DistutilsSetupError,
- self.dist.exclude, packages={'x':'y'}
- )
- self.assertRaises(DistutilsSetupError,
- self.dist.include, ext_modules={'x':'y'}
- )
- self.assertRaises(DistutilsSetupError,
- self.dist.exclude, ext_modules={'x':'y'}
- )
-
- self.assertRaises(DistutilsSetupError,
- self.dist.include, package_dir=['q']
- )
- self.assertRaises(DistutilsSetupError,
- self.dist.exclude, package_dir=['q']
- )
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-class FeatureTests(TestCase):
-
- def setUp(self):
- self.req = Require('Distutils','1.0.3','distutils')
- self.dist = makeSetup(
- features={
- 'foo': Feature("foo",standard=True,require_features=['baz',self.req]),
- 'bar': Feature("bar", standard=True, packages=['pkg.bar'],
- py_modules=['bar_et'], remove=['bar.ext'],
- ),
- 'baz': Feature(
- "baz", optional=False, packages=['pkg.baz'],
- scripts = ['scripts/baz_it'],
- libraries=[('libfoo','foo/foofoo.c')]
- ),
- 'dwim': Feature("DWIM", available=False, remove='bazish'),
- },
- script_args=['--without-bar', 'install'],
- packages = ['pkg.bar', 'pkg.foo'],
- py_modules = ['bar_et', 'bazish'],
- ext_modules = [Extension('bar.ext',['bar.c'])]
- )
-
- def testDefaults(self):
- self.assert_(not
- Feature(
- "test",standard=True,remove='x',available=False
- ).include_by_default()
- )
- self.assert_(
- Feature("test",standard=True,remove='x').include_by_default()
- )
- # Feature must have either kwargs, removes, or require_features
- self.assertRaises(DistutilsSetupError, Feature, "test")
-
- def testAvailability(self):
- self.assertRaises(
- DistutilsPlatformError,
- self.dist.features['dwim'].include_in, self.dist
- )
-
- def testFeatureOptions(self):
- dist = self.dist
- self.assert_(
- ('with-dwim',None,'include DWIM') in dist.feature_options
- )
- self.assert_(
- ('without-dwim',None,'exclude DWIM (default)') in dist.feature_options
- )
- self.assert_(
- ('with-bar',None,'include bar (default)') in dist.feature_options
- )
- self.assert_(
- ('without-bar',None,'exclude bar') in dist.feature_options
- )
- self.assertEqual(dist.feature_negopt['without-foo'],'with-foo')
- self.assertEqual(dist.feature_negopt['without-bar'],'with-bar')
- self.assertEqual(dist.feature_negopt['without-dwim'],'with-dwim')
- self.assert_(not 'without-baz' in dist.feature_negopt)
-
- def testUseFeatures(self):
- dist = self.dist
- self.assertEqual(dist.with_foo,1)
- self.assertEqual(dist.with_bar,0)
- self.assertEqual(dist.with_baz,1)
- self.assert_(not 'bar_et' in dist.py_modules)
- self.assert_(not 'pkg.bar' in dist.packages)
- self.assert_('pkg.baz' in dist.packages)
- self.assert_('scripts/baz_it' in dist.scripts)
- self.assert_(('libfoo','foo/foofoo.c') in dist.libraries)
- self.assertEqual(dist.ext_modules,[])
- self.assertEqual(dist.require_features, [self.req])
-
- # If we ask for bar, it should fail because we explicitly disabled
- # it on the command line
- self.assertRaises(DistutilsOptionError, dist.include_feature, 'bar')
-
- def testFeatureWithInvalidRemove(self):
- self.assertRaises(
- SystemExit, makeSetup, features = {'x':Feature('x', remove='y')}
- )
-
-class TestCommandTests(TestCase):
-
- def testTestIsCommand(self):
- test_cmd = makeSetup().get_command_obj('test')
- self.assert_(isinstance(test_cmd, distutils.cmd.Command))
-
- def testLongOptSuiteWNoDefault(self):
- ts1 = makeSetup(script_args=['test','--test-suite=foo.tests.suite'])
- ts1 = ts1.get_command_obj('test')
- ts1.ensure_finalized()
- self.assertEqual(ts1.test_suite, 'foo.tests.suite')
-
- def testDefaultSuite(self):
- ts2 = makeSetup(test_suite='bar.tests.suite').get_command_obj('test')
- ts2.ensure_finalized()
- self.assertEqual(ts2.test_suite, 'bar.tests.suite')
-
- def testDefaultWModuleOnCmdLine(self):
- ts3 = makeSetup(
- test_suite='bar.tests',
- script_args=['test','-m','foo.tests']
- ).get_command_obj('test')
- ts3.ensure_finalized()
- self.assertEqual(ts3.test_module, 'foo.tests')
- self.assertEqual(ts3.test_suite, 'foo.tests.test_suite')
-
- def testConflictingOptions(self):
- ts4 = makeSetup(
- script_args=['test','-m','bar.tests', '-s','foo.tests.suite']
- ).get_command_obj('test')
- self.assertRaises(DistutilsOptionError, ts4.ensure_finalized)
-
- def testNoSuite(self):
- ts5 = makeSetup().get_command_obj('test')
- ts5.ensure_finalized()
- self.assertEqual(ts5.test_suite, None)
-
-
-
-
-
diff --git a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/tests/doctest.py b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/tests/doctest.py
deleted file mode 100755
index be399a9d..00000000
--- a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/tests/doctest.py
+++ /dev/null
@@ -1,2679 +0,0 @@
-# Module doctest.
-# Released to the public domain 16-Jan-2001, by Tim Peters (tim@python.org).
-# Major enhancements and refactoring by:
-# Jim Fulton
-# Edward Loper
-
-# Provided as-is; use at your own risk; no warranty; no promises; enjoy!
-
-try:
- basestring
-except NameError:
- basestring = str,unicode
-
-try:
- enumerate
-except NameError:
- def enumerate(seq):
- return zip(range(len(seq)),seq)
-
-r"""Module doctest -- a framework for running examples in docstrings.
-
-In simplest use, end each module M to be tested with:
-
-def _test():
- import doctest
- doctest.testmod()
-
-if __name__ == "__main__":
- _test()
-
-Then running the module as a script will cause the examples in the
-docstrings to get executed and verified:
-
-python M.py
-
-This won't display anything unless an example fails, in which case the
-failing example(s) and the cause(s) of the failure(s) are printed to stdout
-(why not stderr? because stderr is a lame hack <0.2 wink>), and the final
-line of output is "Test failed.".
-
-Run it with the -v switch instead:
-
-python M.py -v
-
-and a detailed report of all examples tried is printed to stdout, along
-with assorted summaries at the end.
-
-You can force verbose mode by passing "verbose=True" to testmod, or prohibit
-it by passing "verbose=False". In either of those cases, sys.argv is not
-examined by testmod.
-
-There are a variety of other ways to run doctests, including integration
-with the unittest framework, and support for running non-Python text
-files containing doctests. There are also many ways to override parts
-of doctest's default behaviors. See the Library Reference Manual for
-details.
-"""
-
-__docformat__ = 'reStructuredText en'
-
-__all__ = [
-    # 0. Option Flags
- 'register_optionflag',
- 'DONT_ACCEPT_TRUE_FOR_1',
- 'DONT_ACCEPT_BLANKLINE',
- 'NORMALIZE_WHITESPACE',
- 'ELLIPSIS',
- 'IGNORE_EXCEPTION_DETAIL',
- 'COMPARISON_FLAGS',
- 'REPORT_UDIFF',
- 'REPORT_CDIFF',
- 'REPORT_NDIFF',
- 'REPORT_ONLY_FIRST_FAILURE',
- 'REPORTING_FLAGS',
- # 1. Utility Functions
- 'is_private',
- # 2. Example & DocTest
- 'Example',
- 'DocTest',
- # 3. Doctest Parser
- 'DocTestParser',
- # 4. Doctest Finder
- 'DocTestFinder',
- # 5. Doctest Runner
- 'DocTestRunner',
- 'OutputChecker',
- 'DocTestFailure',
- 'UnexpectedException',
- 'DebugRunner',
- # 6. Test Functions
- 'testmod',
- 'testfile',
- 'run_docstring_examples',
- # 7. Tester
- 'Tester',
- # 8. Unittest Support
- 'DocTestSuite',
- 'DocFileSuite',
- 'set_unittest_reportflags',
- # 9. Debugging Support
- 'script_from_examples',
- 'testsource',
- 'debug_src',
- 'debug',
-]
-
-import __future__
-
-import sys, traceback, inspect, linecache, os, re, types
-import unittest, difflib, pdb, tempfile
-import warnings
-from StringIO import StringIO
-
-# Don't whine about the deprecated is_private function in this
-# module's tests.
-warnings.filterwarnings("ignore", "is_private", DeprecationWarning,
- __name__, 0)
-
-# There are 4 basic classes:
-# - Example: a <source, want> pair, plus an intra-docstring line number.
-# - DocTest: a collection of examples, parsed from a docstring, plus
-# info about where the docstring came from (name, filename, lineno).
-# - DocTestFinder: extracts DocTests from a given object's docstring and
-# its contained objects' docstrings.
-# - DocTestRunner: runs DocTest cases, and accumulates statistics.
-#
-# So the basic picture is:
-#
-# list of:
-# +------+ +---------+ +-------+
-# |object| --DocTestFinder-> | DocTest | --DocTestRunner-> |results|
-# +------+ +---------+ +-------+
-# | Example |
-# | ... |
-# | Example |
-# +---------+
-
-# Option constants.
-
-OPTIONFLAGS_BY_NAME = {}
-def register_optionflag(name):
- flag = 1 << len(OPTIONFLAGS_BY_NAME)
- OPTIONFLAGS_BY_NAME[name] = flag
- return flag
-
-DONT_ACCEPT_TRUE_FOR_1 = register_optionflag('DONT_ACCEPT_TRUE_FOR_1')
-DONT_ACCEPT_BLANKLINE = register_optionflag('DONT_ACCEPT_BLANKLINE')
-NORMALIZE_WHITESPACE = register_optionflag('NORMALIZE_WHITESPACE')
-ELLIPSIS = register_optionflag('ELLIPSIS')
-IGNORE_EXCEPTION_DETAIL = register_optionflag('IGNORE_EXCEPTION_DETAIL')
-
-COMPARISON_FLAGS = (DONT_ACCEPT_TRUE_FOR_1 |
- DONT_ACCEPT_BLANKLINE |
- NORMALIZE_WHITESPACE |
- ELLIPSIS |
- IGNORE_EXCEPTION_DETAIL)
-
-REPORT_UDIFF = register_optionflag('REPORT_UDIFF')
-REPORT_CDIFF = register_optionflag('REPORT_CDIFF')
-REPORT_NDIFF = register_optionflag('REPORT_NDIFF')
-REPORT_ONLY_FIRST_FAILURE = register_optionflag('REPORT_ONLY_FIRST_FAILURE')
-
-REPORTING_FLAGS = (REPORT_UDIFF |
- REPORT_CDIFF |
- REPORT_NDIFF |
- REPORT_ONLY_FIRST_FAILURE)
-
-# Special string markers for use in `want` strings:
-BLANKLINE_MARKER = '<BLANKLINE>'
-ELLIPSIS_MARKER = '...'
-
-######################################################################
-## Table of Contents
-######################################################################
-# 1. Utility Functions
-# 2. Example & DocTest -- store test cases
-# 3. DocTest Parser -- extracts examples from strings
-# 4. DocTest Finder -- extracts test cases from objects
-# 5. DocTest Runner -- runs test cases
-# 6. Test Functions -- convenient wrappers for testing
-# 7. Tester Class -- for backwards compatibility
-# 8. Unittest Support
-# 9. Debugging Support
-# 10. Example Usage
-
-######################################################################
-## 1. Utility Functions
-######################################################################
-
-def is_private(prefix, base):
- """prefix, base -> true iff name prefix + "." + base is "private".
-
- Prefix may be an empty string, and base does not contain a period.
- Prefix is ignored (although functions you write conforming to this
- protocol may make use of it).
- Return true iff base begins with an (at least one) underscore, but
- does not both begin and end with (at least) two underscores.
-
- >>> is_private("a.b", "my_func")
- False
- >>> is_private("____", "_my_func")
- True
- >>> is_private("someclass", "__init__")
- False
- >>> is_private("sometypo", "__init_")
- True
- >>> is_private("x.y.z", "_")
- True
- >>> is_private("_x.y.z", "__")
- False
- >>> is_private("", "") # senseless but consistent
- False
- """
- warnings.warn("is_private is deprecated; it wasn't useful; "
- "examine DocTestFinder.find() lists instead",
- DeprecationWarning, stacklevel=2)
- return base[:1] == "_" and not base[:2] == "__" == base[-2:]
-
-def _extract_future_flags(globs):
- """
- Return the compiler-flags associated with the future features that
- have been imported into the given namespace (globs).
- """
- flags = 0
- for fname in __future__.all_feature_names:
- feature = globs.get(fname, None)
- if feature is getattr(__future__, fname):
- flags |= feature.compiler_flag
- return flags
-
-def _normalize_module(module, depth=2):
- """
- Return the module specified by `module`. In particular:
- - If `module` is a module, then return module.
- - If `module` is a string, then import and return the
- module with that name.
- - If `module` is None, then return the calling module.
- The calling module is assumed to be the module of
- the stack frame at the given depth in the call stack.
- """
- if inspect.ismodule(module):
- return module
- elif isinstance(module, (str, unicode)):
- return __import__(module, globals(), locals(), ["*"])
- elif module is None:
- return sys.modules[sys._getframe(depth).f_globals['__name__']]
- else:
- raise TypeError("Expected a module, string, or None")
-
-def _indent(s, indent=4):
- """
- Add the given number of space characters to the beginning every
- non-blank line in `s`, and return the result.
- """
- # This regexp matches the start of non-blank lines:
- return re.sub('(?m)^(?!$)', indent*' ', s)
-
-def _exception_traceback(exc_info):
- """
- Return a string containing a traceback message for the given
- exc_info tuple (as returned by sys.exc_info()).
- """
- # Get a traceback message.
- excout = StringIO()
- exc_type, exc_val, exc_tb = exc_info
- traceback.print_exception(exc_type, exc_val, exc_tb, file=excout)
- return excout.getvalue()
-
-# Override some StringIO methods.
-class _SpoofOut(StringIO):
- def getvalue(self):
- result = StringIO.getvalue(self)
- # If anything at all was written, make sure there's a trailing
- # newline. There's no way for the expected output to indicate
- # that a trailing newline is missing.
- if result and not result.endswith("\n"):
- result += "\n"
- # Prevent softspace from screwing up the next test case, in
- # case they used print with a trailing comma in an example.
- if hasattr(self, "softspace"):
- del self.softspace
- return result
-
- def truncate(self, size=None):
- StringIO.truncate(self, size)
- if hasattr(self, "softspace"):
- del self.softspace
-
-# Worst-case linear-time ellipsis matching.
-def _ellipsis_match(want, got):
- """
- Essentially the only subtle case:
- >>> _ellipsis_match('aa...aa', 'aaa')
- False
- """
- if want.find(ELLIPSIS_MARKER)==-1:
- return want == got
-
- # Find "the real" strings.
- ws = want.split(ELLIPSIS_MARKER)
- assert len(ws) >= 2
-
- # Deal with exact matches possibly needed at one or both ends.
- startpos, endpos = 0, len(got)
- w = ws[0]
- if w: # starts with exact match
- if got.startswith(w):
- startpos = len(w)
- del ws[0]
- else:
- return False
- w = ws[-1]
- if w: # ends with exact match
- if got.endswith(w):
- endpos -= len(w)
- del ws[-1]
- else:
- return False
-
- if startpos > endpos:
- # Exact end matches required more characters than we have, as in
- # _ellipsis_match('aa...aa', 'aaa')
- return False
-
- # For the rest, we only need to find the leftmost non-overlapping
- # match for each piece. If there's no overall match that way alone,
- # there's no overall match period.
- for w in ws:
- # w may be '' at times, if there are consecutive ellipses, or
- # due to an ellipsis at the start or end of `want`. That's OK.
- # Search for an empty string succeeds, and doesn't change startpos.
- startpos = got.find(w, startpos, endpos)
- if startpos < 0:
- return False
- startpos += len(w)
-
- return True
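-
-# A few more illustrative cases:
-#
-#     _ellipsis_match('a...c', 'abc')    # True: middle piece found
-#     _ellipsis_match('a...c', 'ac')     # True: '...' may match ''
-#     _ellipsis_match('...b...', 'xbx')  # True: only 'b' must appear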
-
-def _comment_line(line):
- "Return a commented form of the given line"
- line = line.rstrip()
- if line:
- return '# '+line
- else:
- return '#'
-
-class _OutputRedirectingPdb(pdb.Pdb):
- """
- A specialized version of the python debugger that redirects stdout
- to a given stream when interacting with the user. Stdout is *not*
- redirected when traced code is executed.
- """
- def __init__(self, out):
- self.__out = out
- pdb.Pdb.__init__(self)
-
- def trace_dispatch(self, *args):
- # Redirect stdout to the given stream.
- save_stdout = sys.stdout
- sys.stdout = self.__out
- # Call Pdb's trace dispatch method.
- try:
- return pdb.Pdb.trace_dispatch(self, *args)
- finally:
- sys.stdout = save_stdout
-
-# [XX] Normalize with respect to os.path.pardir?
-def _module_relative_path(module, path):
- if not inspect.ismodule(module):
- raise TypeError, 'Expected a module: %r' % module
- if path.startswith('/'):
- raise ValueError, 'Module-relative files may not have absolute paths'
-
- # Find the base directory for the path.
- if hasattr(module, '__file__'):
- # A normal module/package
- basedir = os.path.split(module.__file__)[0]
- elif module.__name__ == '__main__':
- # An interactive session.
- if len(sys.argv)>0 and sys.argv[0] != '':
- basedir = os.path.split(sys.argv[0])[0]
- else:
- basedir = os.curdir
- else:
- # A module w/o __file__ (this includes builtins)
-        raise ValueError("Can't resolve paths relative to the module " +
-                         str(module) + " (it has no __file__)")
-
- # Combine the base directory and the path.
- return os.path.join(basedir, *(path.split('/')))
-
-######################################################################
-## 2. Example & DocTest
-######################################################################
-## - An "example" is a <source, want> pair, where "source" is a
-## fragment of source code, and "want" is the expected output for
-## "source." The Example class also includes information about
-## where the example was extracted from.
-##
-## - A "doctest" is a collection of examples, typically extracted from
-## a string (such as an object's docstring). The DocTest class also
-## includes information about where the string was extracted from.
-
-class Example:
- """
- A single doctest example, consisting of source code and expected
- output. `Example` defines the following attributes:
-
- - source: A single Python statement, always ending with a newline.
- The constructor adds a newline if needed.
-
- - want: The expected output from running the source code (either
- from stdout, or a traceback in case of exception). `want` ends
- with a newline unless it's empty, in which case it's an empty
- string. The constructor adds a newline if needed.
-
- - exc_msg: The exception message generated by the example, if
- the example is expected to generate an exception; or `None` if
- it is not expected to generate an exception. This exception
- message is compared against the return value of
- `traceback.format_exception_only()`. `exc_msg` ends with a
- newline unless it's `None`. The constructor adds a newline
- if needed.
-
- - lineno: The line number within the DocTest string containing
- this Example where the Example begins. This line number is
- zero-based, with respect to the beginning of the DocTest.
-
- - indent: The example's indentation in the DocTest string.
-      I.e., the number of space characters that precede the
- example's first prompt.
-
- - options: A dictionary mapping from option flags to True or
- False, which is used to override default options for this
- example. Any option flags not contained in this dictionary
- are left at their default value (as specified by the
- DocTestRunner's optionflags). By default, no options are set.
- """
- def __init__(self, source, want, exc_msg=None, lineno=0, indent=0,
- options=None):
- # Normalize inputs.
- if not source.endswith('\n'):
- source += '\n'
- if want and not want.endswith('\n'):
- want += '\n'
- if exc_msg is not None and not exc_msg.endswith('\n'):
- exc_msg += '\n'
- # Store properties.
- self.source = source
- self.want = want
- self.lineno = lineno
- self.indent = indent
- if options is None: options = {}
- self.options = options
- self.exc_msg = exc_msg
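-
-# A minimal sketch: the constructor normalizes trailing newlines, so
-# source and want may be given bare.
-#
-#     ex = Example(source="print 'x'", want='x')
-#     # ex.source == "print 'x'\n"; ex.want == 'x\n'; ex.exc_msg is None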
-
-class DocTest:
- """
- A collection of doctest examples that should be run in a single
- namespace. Each `DocTest` defines the following attributes:
-
- - examples: the list of examples.
-
- - globs: The namespace (aka globals) that the examples should
- be run in.
-
- - name: A name identifying the DocTest (typically, the name of
- the object whose docstring this DocTest was extracted from).
-
- - filename: The name of the file that this DocTest was extracted
- from, or `None` if the filename is unknown.
-
- - lineno: The line number within filename where this DocTest
- begins, or `None` if the line number is unavailable. This
- line number is zero-based, with respect to the beginning of
- the file.
-
- - docstring: The string that the examples were extracted from,
- or `None` if the string is unavailable.
- """
- def __init__(self, examples, globs, name, filename, lineno, docstring):
- """
- Create a new DocTest containing the given examples. The
- DocTest's globals are initialized with a copy of `globs`.
- """
- assert not isinstance(examples, basestring), \
- "DocTest no longer accepts str; use DocTestParser instead"
- self.examples = examples
- self.docstring = docstring
- self.globs = globs.copy()
- self.name = name
- self.filename = filename
- self.lineno = lineno
-
- def __repr__(self):
- if len(self.examples) == 0:
- examples = 'no examples'
- elif len(self.examples) == 1:
- examples = '1 example'
- else:
- examples = '%d examples' % len(self.examples)
- return ('<DocTest %s from %s:%s (%s)>' %
- (self.name, self.filename, self.lineno, examples))
-
-
- # This lets us sort tests by name:
- def __cmp__(self, other):
- if not isinstance(other, DocTest):
- return -1
- return cmp((self.name, self.filename, self.lineno, id(self)),
- (other.name, other.filename, other.lineno, id(other)))
-
-######################################################################
-## 3. DocTestParser
-######################################################################
-
-class DocTestParser:
- """
- A class used to parse strings containing doctest examples.
- """
- # This regular expression is used to find doctest examples in a
- # string. It defines three groups: `source` is the source code
- # (including leading indentation and prompts); `indent` is the
- # indentation of the first (PS1) line of the source code; and
- # `want` is the expected output (including leading indentation).
- _EXAMPLE_RE = re.compile(r'''
- # Source consists of a PS1 line followed by zero or more PS2 lines.
- (?P<source>
- (?:^(?P<indent> [ ]*) >>> .*) # PS1 line
- (?:\n [ ]* \.\.\. .*)*) # PS2 lines
- \n?
- # Want consists of any non-blank lines that do not start with PS1.
- (?P<want> (?:(?![ ]*$) # Not a blank line
- (?![ ]*>>>) # Not a line starting with PS1
- .*$\n? # But any other line
- )*)
- ''', re.MULTILINE | re.VERBOSE)
-
- # A regular expression for handling `want` strings that contain
- # expected exceptions. It divides `want` into three pieces:
- # - the traceback header line (`hdr`)
- # - the traceback stack (`stack`)
- # - the exception message (`msg`), as generated by
- # traceback.format_exception_only()
- # `msg` may have multiple lines. We assume/require that the
- # exception message is the first non-indented line starting with a word
- # character following the traceback header line.
- _EXCEPTION_RE = re.compile(r"""
- # Grab the traceback header. Different versions of Python have
- # said different things on the first traceback line.
- ^(?P<hdr> Traceback\ \(
- (?: most\ recent\ call\ last
- | innermost\ last
- ) \) :
- )
- \s* $ # toss trailing whitespace on the header.
- (?P<stack> .*?) # don't blink: absorb stuff until...
- ^ (?P<msg> \w+ .*) # a line *starts* with alphanum.
- """, re.VERBOSE | re.MULTILINE | re.DOTALL)
-
- # A callable returning a true value iff its argument is a blank line
- # or contains a single comment.
- _IS_BLANK_OR_COMMENT = re.compile(r'^[ ]*(#.*)?$').match
-
- def parse(self, string, name='<string>'):
- """
- Divide the given string into examples and intervening text,
- and return them as a list of alternating Examples and strings.
- Line numbers for the Examples are 0-based. The optional
- argument `name` is a name identifying this string, and is only
- used for error messages.
- """
- string = string.expandtabs()
- # If all lines begin with the same indentation, then strip it.
- min_indent = self._min_indent(string)
- if min_indent > 0:
- string = '\n'.join([l[min_indent:] for l in string.split('\n')])
-
- output = []
- charno, lineno = 0, 0
- # Find all doctest examples in the string:
- for m in self._EXAMPLE_RE.finditer(string):
- # Add the pre-example text to `output`.
- output.append(string[charno:m.start()])
- # Update lineno (lines before this example)
- lineno += string.count('\n', charno, m.start())
- # Extract info from the regexp match.
- (source, options, want, exc_msg) = \
- self._parse_example(m, name, lineno)
- # Create an Example, and add it to the list.
- if not self._IS_BLANK_OR_COMMENT(source):
- output.append( Example(source, want, exc_msg,
- lineno=lineno,
- indent=min_indent+len(m.group('indent')),
- options=options) )
- # Update lineno (lines inside this example)
- lineno += string.count('\n', m.start(), m.end())
- # Update charno.
- charno = m.end()
- # Add any remaining post-example text to `output`.
- output.append(string[charno:])
- return output
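-
-    # Usage sketch: parse() alternates intervening text with Example
-    # objects, so even-indexed entries are always plain strings.
-    #
-    #     parser = DocTestParser()
-    #     chunks = parser.parse("Intro.\n>>> 1 + 1\n2\n")
-    #     # chunks == ['Intro.\n', <Example object>, '']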
-
- def get_doctest(self, string, globs, name, filename, lineno):
- """
- Extract all doctest examples from the given string, and
- collect them into a `DocTest` object.
-
- `globs`, `name`, `filename`, and `lineno` are attributes for
- the new `DocTest` object. See the documentation for `DocTest`
- for more information.
- """
- return DocTest(self.get_examples(string, name), globs,
- name, filename, lineno, string)
-
- def get_examples(self, string, name='<string>'):
- """
- Extract all doctest examples from the given string, and return
- them as a list of `Example` objects. Line numbers are
- 0-based, because it's most common in doctests that nothing
- interesting appears on the same line as opening triple-quote,
-        interesting appears on the same line as the opening triple-quote,
-
- The optional argument `name` is a name identifying this
- string, and is only used for error messages.
- """
- return [x for x in self.parse(string, name)
- if isinstance(x, Example)]
-
- def _parse_example(self, m, name, lineno):
- """
- Given a regular expression match from `_EXAMPLE_RE` (`m`),
- return a pair `(source, want)`, where `source` is the matched
- example's source code (with prompts and indentation stripped);
- and `want` is the example's expected output (with indentation
- stripped).
-
- `name` is the string's name, and `lineno` is the line number
- where the example starts; both are used for error messages.
- """
- # Get the example's indentation level.
- indent = len(m.group('indent'))
-
- # Divide source into lines; check that they're properly
- # indented; and then strip their indentation & prompts.
- source_lines = m.group('source').split('\n')
- self._check_prompt_blank(source_lines, indent, name, lineno)
- self._check_prefix(source_lines[1:], ' '*indent + '.', name, lineno)
- source = '\n'.join([sl[indent+4:] for sl in source_lines])
-
- # Divide want into lines; check that it's properly indented; and
- # then strip the indentation. Spaces before the last newline should
- # be preserved, so plain rstrip() isn't good enough.
- want = m.group('want')
- want_lines = want.split('\n')
- if len(want_lines) > 1 and re.match(r' *$', want_lines[-1]):
- del want_lines[-1] # forget final newline & spaces after it
- self._check_prefix(want_lines, ' '*indent, name,
- lineno + len(source_lines))
- want = '\n'.join([wl[indent:] for wl in want_lines])
-
- # If `want` contains a traceback message, then extract it.
- m = self._EXCEPTION_RE.match(want)
- if m:
- exc_msg = m.group('msg')
- else:
- exc_msg = None
-
- # Extract options from the source.
- options = self._find_options(source, name, lineno)
-
- return source, options, want, exc_msg
-
- # This regular expression looks for option directives in the
- # source code of an example. Option directives are comments
- # starting with "doctest:". Warning: this may give false
- # positives for string-literals that contain the string
- # "#doctest:". Eliminating these false positives would require
- # actually parsing the string; but we limit them by ignoring any
- # line containing "#doctest:" that is *followed* by a quote mark.
- _OPTION_DIRECTIVE_RE = re.compile(r'#\s*doctest:\s*([^\n\'"]*)$',
- re.MULTILINE)
-
- def _find_options(self, source, name, lineno):
- """
- Return a dictionary containing option overrides extracted from
- option directives in the given source string.
-
- `name` is the string's name, and `lineno` is the line number
- where the example starts; both are used for error messages.
- """
- options = {}
- # (note: with the current regexp, this will match at most once:)
- for m in self._OPTION_DIRECTIVE_RE.finditer(source):
- option_strings = m.group(1).replace(',', ' ').split()
- for option in option_strings:
- if (option[0] not in '+-' or
- option[1:] not in OPTIONFLAGS_BY_NAME):
- raise ValueError('line %r of the doctest for %s '
- 'has an invalid option: %r' %
- (lineno+1, name, option))
- flag = OPTIONFLAGS_BY_NAME[option[1:]]
- options[flag] = (option[0] == '+')
- if options and self._IS_BLANK_OR_COMMENT(source):
- raise ValueError('line %r of the doctest for %s has an option '
- 'directive on a line with no example: %r' %
- (lineno, name, source))
- return options
-
- # This regular expression finds the indentation of every non-blank
- # line in a string.
- _INDENT_RE = re.compile('^([ ]*)(?=\S)', re.MULTILINE)
-
- def _min_indent(self, s):
- "Return the minimum indentation of any non-blank line in `s`"
- indents = [len(indent) for indent in self._INDENT_RE.findall(s)]
- if len(indents) > 0:
- return min(indents)
- else:
- return 0
-
- def _check_prompt_blank(self, lines, indent, name, lineno):
- """
- Given the lines of a source string (including prompts and
- leading indentation), check to make sure that every prompt is
- followed by a space character. If any line is not followed by
- a space character, then raise ValueError.
- """
- for i, line in enumerate(lines):
- if len(line) >= indent+4 and line[indent+3] != ' ':
- raise ValueError('line %r of the docstring for %s '
- 'lacks blank after %s: %r' %
- (lineno+i+1, name,
- line[indent:indent+3], line))
-
- def _check_prefix(self, lines, prefix, name, lineno):
- """
- Check that every line in the given list starts with the given
- prefix; if any line does not, then raise a ValueError.
- """
- for i, line in enumerate(lines):
- if line and not line.startswith(prefix):
- raise ValueError('line %r of the docstring for %s has '
- 'inconsistent leading whitespace: %r' %
- (lineno+i+1, name, line))
-
-
-######################################################################
-## 4. DocTest Finder
-######################################################################
-
-class DocTestFinder:
- """
- A class used to extract the DocTests that are relevant to a given
- object, from its docstring and the docstrings of its contained
- objects. Doctests can currently be extracted from the following
- object types: modules, functions, classes, methods, staticmethods,
- classmethods, and properties.
- """
-
- def __init__(self, verbose=False, parser=DocTestParser(),
- recurse=True, _namefilter=None, exclude_empty=True):
- """
- Create a new doctest finder.
-
- The optional argument `parser` specifies a class or
- function that should be used to create new DocTest objects (or
- objects that implement the same interface as DocTest). The
- signature for this factory function should match the signature
- of the DocTest constructor.
-
- If the optional argument `recurse` is false, then `find` will
- only examine the given object, and not any contained objects.
-
- If the optional argument `exclude_empty` is false, then `find`
- will include tests for objects with empty docstrings.
- """
- self._parser = parser
- self._verbose = verbose
- self._recurse = recurse
- self._exclude_empty = exclude_empty
- # _namefilter is undocumented, and exists only for temporary backward-
- # compatibility support of testmod's deprecated isprivate mess.
- self._namefilter = _namefilter
-
- def find(self, obj, name=None, module=None, globs=None,
- extraglobs=None):
- """
- Return a list of the DocTests that are defined by the given
- object's docstring, or by any of its contained objects'
- docstrings.
-
- The optional parameter `module` is the module that contains
- the given object. If the module is not specified or is None, then
- the test finder will attempt to automatically determine the
- correct module. The object's module is used:
-
- - As a default namespace, if `globs` is not specified.
- - To prevent the DocTestFinder from extracting DocTests
- from objects that are imported from other modules.
- - To find the name of the file containing the object.
- - To help find the line number of the object within its
- file.
-
- Contained objects whose module does not match `module` are ignored.
-
- If `module` is False, no attempt to find the module will be made.
- This is obscure, of use mostly in tests: if `module` is False, or
- is None but cannot be found automatically, then all objects are
- considered to belong to the (non-existent) module, so all contained
- objects will (recursively) be searched for doctests.
-
- The globals for each DocTest is formed by combining `globs`
- and `extraglobs` (bindings in `extraglobs` override bindings
- in `globs`). A new copy of the globals dictionary is created
- for each DocTest. If `globs` is not specified, then it
- defaults to the module's `__dict__`, if specified, or {}
- otherwise. If `extraglobs` is not specified, then it defaults
- to {}.
-
- """
- # If name was not specified, then extract it from the object.
- if name is None:
- name = getattr(obj, '__name__', None)
- if name is None:
- raise ValueError("DocTestFinder.find: name must be given "
- "when obj.__name__ doesn't exist: %r" %
- (type(obj),))
-
- # Find the module that contains the given object (if obj is
- # a module, then module=obj.). Note: this may fail, in which
- # case module will be None.
- if module is False:
- module = None
- elif module is None:
- module = inspect.getmodule(obj)
-
- # Read the module's source code. This is used by
- # DocTestFinder._find_lineno to find the line number for a
- # given object's docstring.
- try:
- file = inspect.getsourcefile(obj) or inspect.getfile(obj)
- source_lines = linecache.getlines(file)
- if not source_lines:
- source_lines = None
- except TypeError:
- source_lines = None
-
- # Initialize globals, and merge in extraglobs.
- if globs is None:
- if module is None:
- globs = {}
- else:
- globs = module.__dict__.copy()
- else:
- globs = globs.copy()
- if extraglobs is not None:
- globs.update(extraglobs)
-
-        # Recursively explore `obj`, extracting DocTests.
- tests = []
- self._find(tests, obj, name, module, source_lines, globs, {})
- return tests
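-
-    # Usage sketch (hypothetical module): collect every doctest a module
-    # defines and run them.
-    #
-    #     finder = DocTestFinder()
-    #     for test in finder.find(mymodule):
-    #         DocTestRunner().run(test)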
-
- def _filter(self, obj, prefix, base):
- """
- Return true if the given object should not be examined.
- """
- return (self._namefilter is not None and
- self._namefilter(prefix, base))
-
- def _from_module(self, module, object):
- """
- Return true if the given object is defined in the given
- module.
- """
- if module is None:
- return True
- elif inspect.isfunction(object):
- return module.__dict__ is object.func_globals
- elif inspect.isclass(object):
- return module.__name__ == object.__module__
- elif inspect.getmodule(object) is not None:
- return module is inspect.getmodule(object)
- elif hasattr(object, '__module__'):
- return module.__name__ == object.__module__
- elif isinstance(object, property):
-            return True # [XX] no way to be sure.
- else:
- raise ValueError("object must be a class or function")
-
- def _find(self, tests, obj, name, module, source_lines, globs, seen):
- """
- Find tests for the given object and any contained objects, and
- add them to `tests`.
- """
- if self._verbose:
- print 'Finding tests in %s' % name
-
- # If we've already processed this object, then ignore it.
- if id(obj) in seen:
- return
- seen[id(obj)] = 1
-
- # Find a test for this object, and add it to the list of tests.
- test = self._get_test(obj, name, module, globs, source_lines)
- if test is not None:
- tests.append(test)
-
- # Look for tests in a module's contained objects.
- if inspect.ismodule(obj) and self._recurse:
- for valname, val in obj.__dict__.items():
- # Check if this contained object should be ignored.
- if self._filter(val, name, valname):
- continue
- valname = '%s.%s' % (name, valname)
- # Recurse to functions & classes.
- if ((inspect.isfunction(val) or inspect.isclass(val)) and
- self._from_module(module, val)):
- self._find(tests, val, valname, module, source_lines,
- globs, seen)
-
- # Look for tests in a module's __test__ dictionary.
- if inspect.ismodule(obj) and self._recurse:
- for valname, val in getattr(obj, '__test__', {}).items():
- if not isinstance(valname, basestring):
- raise ValueError("DocTestFinder.find: __test__ keys "
- "must be strings: %r" %
- (type(valname),))
- if not (inspect.isfunction(val) or inspect.isclass(val) or
- inspect.ismethod(val) or inspect.ismodule(val) or
- isinstance(val, basestring)):
- raise ValueError("DocTestFinder.find: __test__ values "
- "must be strings, functions, methods, "
- "classes, or modules: %r" %
- (type(val),))
- valname = '%s.__test__.%s' % (name, valname)
- self._find(tests, val, valname, module, source_lines,
- globs, seen)
-
- # Look for tests in a class's contained objects.
- if inspect.isclass(obj) and self._recurse:
- for valname, val in obj.__dict__.items():
- # Check if this contained object should be ignored.
- if self._filter(val, name, valname):
- continue
- # Special handling for staticmethod/classmethod.
- if isinstance(val, staticmethod):
- val = getattr(obj, valname)
- if isinstance(val, classmethod):
- val = getattr(obj, valname).im_func
-
- # Recurse to methods, properties, and nested classes.
- if ((inspect.isfunction(val) or inspect.isclass(val) or
- isinstance(val, property)) and
- self._from_module(module, val)):
- valname = '%s.%s' % (name, valname)
- self._find(tests, val, valname, module, source_lines,
- globs, seen)
-
- def _get_test(self, obj, name, module, globs, source_lines):
- """
- Return a DocTest for the given object, if it defines a docstring;
- otherwise, return None.
- """
- # Extract the object's docstring. If it doesn't have one,
- # then return None (no test for this object).
- if isinstance(obj, basestring):
- docstring = obj
- else:
- try:
- if obj.__doc__ is None:
- docstring = ''
- else:
- docstring = obj.__doc__
- if not isinstance(docstring, basestring):
- docstring = str(docstring)
- except (TypeError, AttributeError):
- docstring = ''
-
- # Find the docstring's location in the file.
- lineno = self._find_lineno(obj, source_lines)
-
- # Don't bother if the docstring is empty.
- if self._exclude_empty and not docstring:
- return None
-
- # Return a DocTest for this object.
- if module is None:
- filename = None
- else:
- filename = getattr(module, '__file__', module.__name__)
- if filename[-4:] in (".pyc", ".pyo"):
- filename = filename[:-1]
- return self._parser.get_doctest(docstring, globs, name,
- filename, lineno)
-
- def _find_lineno(self, obj, source_lines):
- """
- Return a line number of the given object's docstring. Note:
- this method assumes that the object has a docstring.
- """
- lineno = None
-
- # Find the line number for modules.
- if inspect.ismodule(obj):
- lineno = 0
-
- # Find the line number for classes.
- # Note: this could be fooled if a class is defined multiple
- # times in a single file.
- if inspect.isclass(obj):
- if source_lines is None:
- return None
- pat = re.compile(r'^\s*class\s*%s\b' %
- getattr(obj, '__name__', '-'))
- for i, line in enumerate(source_lines):
- if pat.match(line):
- lineno = i
- break
-
- # Find the line number for functions & methods.
- if inspect.ismethod(obj): obj = obj.im_func
- if inspect.isfunction(obj): obj = obj.func_code
- if inspect.istraceback(obj): obj = obj.tb_frame
- if inspect.isframe(obj): obj = obj.f_code
- if inspect.iscode(obj):
- lineno = getattr(obj, 'co_firstlineno', None)-1
-
- # Find the line number where the docstring starts. Assume
- # that it's the first line that begins with a quote mark.
- # Note: this could be fooled by a multiline function
- # signature, where a continuation line begins with a quote
- # mark.
- if lineno is not None:
- if source_lines is None:
- return lineno+1
- pat = re.compile('(^|.*:)\s*\w*("|\')')
- for lineno in range(lineno, len(source_lines)):
- if pat.match(source_lines[lineno]):
- return lineno
-
- # We couldn't find the line number.
- return None
-
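# A minimal sketch of driving the finder above from calling code; "mymodule"
# is a hypothetical stand-in for any module whose docstrings hold examples.
import doctest
import mymodule  # hypothetical module under test

for test in doctest.DocTestFinder().find(mymodule):
    # each DocTest carries the examples parsed from one docstring, plus the
    # name, filename and line number recovered by _find_lineno above
    print test.name, len(test.examples)
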
-######################################################################
-## 5. DocTest Runner
-######################################################################
-
-class DocTestRunner:
- """
- A class used to run DocTest test cases, and accumulate statistics.
- The `run` method is used to process a single DocTest case. It
- returns a tuple `(f, t)`, where `t` is the number of test cases
- tried, and `f` is the number of test cases that failed.
-
- >>> tests = DocTestFinder().find(_TestClass)
- >>> runner = DocTestRunner(verbose=False)
- >>> for test in tests:
- ... print runner.run(test)
- (0, 2)
- (0, 1)
- (0, 2)
- (0, 2)
-
- The `summarize` method prints a summary of all the test cases that
- have been run by the runner, and returns an aggregated `(f, t)`
- tuple:
-
- >>> runner.summarize(verbose=1)
- 4 items passed all tests:
- 2 tests in _TestClass
- 2 tests in _TestClass.__init__
- 2 tests in _TestClass.get
- 1 tests in _TestClass.square
- 7 tests in 4 items.
- 7 passed and 0 failed.
- Test passed.
- (0, 7)
-
- The aggregated number of tried examples and failed examples is
- also available via the `tries` and `failures` attributes:
-
- >>> runner.tries
- 7
- >>> runner.failures
- 0
-
- The comparison between expected outputs and actual outputs is done
- by an `OutputChecker`. This comparison may be customized with a
- number of option flags; see the documentation for `testmod` for
- more information. If the option flags are insufficient, then the
- comparison may also be customized by passing a subclass of
- `OutputChecker` to the constructor.
-
- The test runner's display output can be controlled in two ways.
-    First, an output function (`out`) can be passed to
-    `DocTestRunner.run`; this function will be called with strings that
-    should be displayed.  It defaults to `sys.stdout.write`.  If
-    capturing the output is not sufficient, then the display output
-    can also be customized by subclassing DocTestRunner, and
-    overriding the methods `report_start`, `report_success`,
-    `report_unexpected_exception`, and `report_failure`.
- """
- # This divider string is used to separate failure messages, and to
- # separate sections of the summary.
- DIVIDER = "*" * 70
-
- def __init__(self, checker=None, verbose=None, optionflags=0):
- """
- Create a new test runner.
-
- Optional keyword arg `checker` is the `OutputChecker` that
- should be used to compare the expected outputs and actual
- outputs of doctest examples.
-
- Optional keyword arg 'verbose' prints lots of stuff if true,
- only failures if false; by default, it's true iff '-v' is in
- sys.argv.
-
- Optional argument `optionflags` can be used to control how the
- test runner compares expected output to actual output, and how
- it displays failures. See the documentation for `testmod` for
- more information.
- """
- self._checker = checker or OutputChecker()
- if verbose is None:
- verbose = '-v' in sys.argv
- self._verbose = verbose
- self.optionflags = optionflags
- self.original_optionflags = optionflags
-
- # Keep track of the examples we've run.
- self.tries = 0
- self.failures = 0
- self._name2ft = {}
-
- # Create a fake output target for capturing doctest output.
- self._fakeout = _SpoofOut()
-
- #/////////////////////////////////////////////////////////////////
- # Reporting methods
- #/////////////////////////////////////////////////////////////////
-
- def report_start(self, out, test, example):
- """
- Report that the test runner is about to process the given
- example. (Only displays a message if verbose=True)
- """
- if self._verbose:
- if example.want:
- out('Trying:\n' + _indent(example.source) +
- 'Expecting:\n' + _indent(example.want))
- else:
- out('Trying:\n' + _indent(example.source) +
- 'Expecting nothing\n')
-
- def report_success(self, out, test, example, got):
- """
- Report that the given example ran successfully. (Only
- displays a message if verbose=True)
- """
- if self._verbose:
- out("ok\n")
-
- def report_failure(self, out, test, example, got):
- """
- Report that the given example failed.
- """
- out(self._failure_header(test, example) +
- self._checker.output_difference(example, got, self.optionflags))
-
- def report_unexpected_exception(self, out, test, example, exc_info):
- """
- Report that the given example raised an unexpected exception.
- """
- out(self._failure_header(test, example) +
- 'Exception raised:\n' + _indent(_exception_traceback(exc_info)))
-
- def _failure_header(self, test, example):
- out = [self.DIVIDER]
- if test.filename:
- if test.lineno is not None and example.lineno is not None:
- lineno = test.lineno + example.lineno + 1
- else:
- lineno = '?'
- out.append('File "%s", line %s, in %s' %
- (test.filename, lineno, test.name))
- else:
- out.append('Line %s, in %s' % (example.lineno+1, test.name))
- out.append('Failed example:')
- source = example.source
- out.append(_indent(source))
- return '\n'.join(out)
-
- #/////////////////////////////////////////////////////////////////
- # DocTest Running
- #/////////////////////////////////////////////////////////////////
-
- def __run(self, test, compileflags, out):
- """
- Run the examples in `test`. Write the outcome of each example
- with one of the `DocTestRunner.report_*` methods, using the
- writer function `out`. `compileflags` is the set of compiler
- flags that should be used to execute examples. Return a tuple
- `(f, t)`, where `t` is the number of examples tried, and `f`
- is the number of examples that failed. The examples are run
- in the namespace `test.globs`.
- """
- # Keep track of the number of failures and tries.
- failures = tries = 0
-
- # Save the option flags (since option directives can be used
- # to modify them).
- original_optionflags = self.optionflags
-
- SUCCESS, FAILURE, BOOM = range(3) # `outcome` state
-
- check = self._checker.check_output
-
- # Process each example.
- for examplenum, example in enumerate(test.examples):
-
-            # If REPORT_ONLY_FIRST_FAILURE is set, then suppress
- # reporting after the first failure.
- quiet = (self.optionflags & REPORT_ONLY_FIRST_FAILURE and
- failures > 0)
-
- # Merge in the example's options.
- self.optionflags = original_optionflags
- if example.options:
- for (optionflag, val) in example.options.items():
- if val:
- self.optionflags |= optionflag
- else:
- self.optionflags &= ~optionflag
-
- # Record that we started this example.
- tries += 1
- if not quiet:
- self.report_start(out, test, example)
-
- # Use a special filename for compile(), so we can retrieve
- # the source code during interactive debugging (see
- # __patched_linecache_getlines).
- filename = '<doctest %s[%d]>' % (test.name, examplenum)
-
- # Run the example in the given context (globs), and record
- # any exception that gets raised. (But don't intercept
- # keyboard interrupts.)
- try:
- # Don't blink! This is where the user's code gets run.
- exec compile(example.source, filename, "single",
- compileflags, 1) in test.globs
- self.debugger.set_continue() # ==== Example Finished ====
- exception = None
- except KeyboardInterrupt:
- raise
- except:
- exception = sys.exc_info()
- self.debugger.set_continue() # ==== Example Finished ====
-
- got = self._fakeout.getvalue() # the actual output
- self._fakeout.truncate(0)
- outcome = FAILURE # guilty until proved innocent or insane
-
- # If the example executed without raising any exceptions,
- # verify its output.
- if exception is None:
- if check(example.want, got, self.optionflags):
- outcome = SUCCESS
-
- # The example raised an exception: check if it was expected.
- else:
- exc_info = sys.exc_info()
- exc_msg = traceback.format_exception_only(*exc_info[:2])[-1]
- if not quiet:
- got += _exception_traceback(exc_info)
-
- # If `example.exc_msg` is None, then we weren't expecting
- # an exception.
- if example.exc_msg is None:
- outcome = BOOM
-
- # We expected an exception: see whether it matches.
- elif check(example.exc_msg, exc_msg, self.optionflags):
- outcome = SUCCESS
-
- # Another chance if they didn't care about the detail.
- elif self.optionflags & IGNORE_EXCEPTION_DETAIL:
- m1 = re.match(r'[^:]*:', example.exc_msg)
- m2 = re.match(r'[^:]*:', exc_msg)
- if m1 and m2 and check(m1.group(0), m2.group(0),
- self.optionflags):
- outcome = SUCCESS
-
- # Report the outcome.
- if outcome is SUCCESS:
- if not quiet:
- self.report_success(out, test, example, got)
- elif outcome is FAILURE:
- if not quiet:
- self.report_failure(out, test, example, got)
- failures += 1
- elif outcome is BOOM:
- if not quiet:
- self.report_unexpected_exception(out, test, example,
- exc_info)
- failures += 1
- else:
- assert False, ("unknown outcome", outcome)
-
- # Restore the option flags (in case they were modified)
- self.optionflags = original_optionflags
-
- # Record and return the number of failures and tries.
- self.__record_outcome(test, failures, tries)
- return failures, tries
-
- def __record_outcome(self, test, f, t):
- """
- Record the fact that the given DocTest (`test`) generated `f`
- failures out of `t` tried examples.
- """
- f2, t2 = self._name2ft.get(test.name, (0,0))
- self._name2ft[test.name] = (f+f2, t+t2)
- self.failures += f
- self.tries += t
-
- __LINECACHE_FILENAME_RE = re.compile(r'<doctest '
- r'(?P<name>[\w\.]+)'
- r'\[(?P<examplenum>\d+)\]>$')
- def __patched_linecache_getlines(self, filename, module_globals=None):
- m = self.__LINECACHE_FILENAME_RE.match(filename)
- if m and m.group('name') == self.test.name:
- example = self.test.examples[int(m.group('examplenum'))]
- return example.source.splitlines(True)
- elif self.save_linecache_getlines.func_code.co_argcount>1:
- return self.save_linecache_getlines(filename, module_globals)
- else:
- return self.save_linecache_getlines(filename)
-
- def run(self, test, compileflags=None, out=None, clear_globs=True):
- """
- Run the examples in `test`, and display the results using the
- writer function `out`.
-
- The examples are run in the namespace `test.globs`. If
- `clear_globs` is true (the default), then this namespace will
- be cleared after the test runs, to help with garbage
- collection. If you would like to examine the namespace after
- the test completes, then use `clear_globs=False`.
-
- `compileflags` gives the set of flags that should be used by
- the Python compiler when running the examples. If not
- specified, then it will default to the set of future-import
- flags that apply to `globs`.
-
- The output of each example is checked using
-        `OutputChecker.check_output`, and the results are formatted by
- the `DocTestRunner.report_*` methods.
- """
- self.test = test
-
- if compileflags is None:
- compileflags = _extract_future_flags(test.globs)
-
- save_stdout = sys.stdout
- if out is None:
- out = save_stdout.write
- sys.stdout = self._fakeout
-
- # Patch pdb.set_trace to restore sys.stdout during interactive
- # debugging (so it's not still redirected to self._fakeout).
- # Note that the interactive output will go to *our*
- # save_stdout, even if that's not the real sys.stdout; this
- # allows us to write test cases for the set_trace behavior.
- save_set_trace = pdb.set_trace
- self.debugger = _OutputRedirectingPdb(save_stdout)
- self.debugger.reset()
- pdb.set_trace = self.debugger.set_trace
-
- # Patch linecache.getlines, so we can see the example's source
- # when we're inside the debugger.
- self.save_linecache_getlines = linecache.getlines
- linecache.getlines = self.__patched_linecache_getlines
-
- try:
- return self.__run(test, compileflags, out)
- finally:
- sys.stdout = save_stdout
- pdb.set_trace = save_set_trace
- linecache.getlines = self.save_linecache_getlines
- if clear_globs:
- test.globs.clear()
-
- #/////////////////////////////////////////////////////////////////
- # Summarization
- #/////////////////////////////////////////////////////////////////
- def summarize(self, verbose=None):
- """
- Print a summary of all the test cases that have been run by
- this DocTestRunner, and return a tuple `(f, t)`, where `f` is
- the total number of failed examples, and `t` is the total
- number of tried examples.
-
- The optional `verbose` argument controls how detailed the
- summary is. If the verbosity is not specified, then the
- DocTestRunner's verbosity is used.
- """
- if verbose is None:
- verbose = self._verbose
- notests = []
- passed = []
- failed = []
- totalt = totalf = 0
- for x in self._name2ft.items():
- name, (f, t) = x
- assert f <= t
- totalt += t
- totalf += f
- if t == 0:
- notests.append(name)
- elif f == 0:
- passed.append( (name, t) )
- else:
- failed.append(x)
- if verbose:
- if notests:
- print len(notests), "items had no tests:"
- notests.sort()
- for thing in notests:
- print " ", thing
- if passed:
- print len(passed), "items passed all tests:"
- passed.sort()
- for thing, count in passed:
- print " %3d tests in %s" % (count, thing)
- if failed:
- print self.DIVIDER
- print len(failed), "items had failures:"
- failed.sort()
- for thing, (f, t) in failed:
- print " %3d of %3d in %s" % (f, t, thing)
- if verbose:
- print totalt, "tests in", len(self._name2ft), "items."
- print totalt - totalf, "passed and", totalf, "failed."
- if totalf:
- print "***Test Failed***", totalf, "failures."
- elif verbose:
- print "Test passed."
- return totalf, totalt
-
- #/////////////////////////////////////////////////////////////////
- # Backward compatibility cruft to maintain doctest.master.
- #/////////////////////////////////////////////////////////////////
- def merge(self, other):
- d = self._name2ft
- for name, (f, t) in other._name2ft.items():
- if name in d:
- print "*** DocTestRunner.merge: '" + name + "' in both" \
- " testers; summing outcomes."
- f2, t2 = d[name]
- f = f + f2
- t = t + t2
- d[name] = f, t
-
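# As the class docstring notes, display output can also be customized by
# subclassing DocTestRunner; a small sketch that stays quiet on success and
# keeps the stock failure report:
import doctest

class QuietRunner(doctest.DocTestRunner):
    def report_start(self, out, test, example):
        pass  # suppress the verbose "Trying:" banner

    def report_success(self, out, test, example, got):
        pass  # successes stay silent; report_failure is inherited unchanged
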
-class OutputChecker:
- """
-    A class used to check whether the actual output from a doctest
- example matches the expected output. `OutputChecker` defines two
- methods: `check_output`, which compares a given pair of outputs,
- and returns true if they match; and `output_difference`, which
- returns a string describing the differences between two outputs.
- """
- def check_output(self, want, got, optionflags):
- """
- Return True iff the actual output from an example (`got`)
- matches the expected output (`want`). These strings are
- always considered to match if they are identical; but
- depending on what option flags the test runner is using,
- several non-exact match types are also possible. See the
-        documentation for `DocTestRunner` for more information about
- option flags.
- """
- # Handle the common case first, for efficiency:
- # if they're string-identical, always return true.
- if got == want:
- return True
-
- # The values True and False replaced 1 and 0 as the return
- # value for boolean comparisons in Python 2.3.
- if not (optionflags & DONT_ACCEPT_TRUE_FOR_1):
- if (got,want) == ("True\n", "1\n"):
- return True
- if (got,want) == ("False\n", "0\n"):
- return True
-
- # <BLANKLINE> can be used as a special sequence to signify a
- # blank line, unless the DONT_ACCEPT_BLANKLINE flag is used.
- if not (optionflags & DONT_ACCEPT_BLANKLINE):
- # Replace <BLANKLINE> in want with a blank line.
- want = re.sub('(?m)^%s\s*?$' % re.escape(BLANKLINE_MARKER),
- '', want)
- # If a line in got contains only spaces, then remove the
- # spaces.
- got = re.sub('(?m)^\s*?$', '', got)
- if got == want:
- return True
-
- # This flag causes doctest to ignore any differences in the
- # contents of whitespace strings. Note that this can be used
- # in conjunction with the ELLIPSIS flag.
- if optionflags & NORMALIZE_WHITESPACE:
- got = ' '.join(got.split())
- want = ' '.join(want.split())
- if got == want:
- return True
-
- # The ELLIPSIS flag says to let the sequence "..." in `want`
- # match any substring in `got`.
- if optionflags & ELLIPSIS:
- if _ellipsis_match(want, got):
- return True
-
- # We didn't find any match; return false.
- return False
-
- # Should we do a fancy diff?
- def _do_a_fancy_diff(self, want, got, optionflags):
- # Not unless they asked for a fancy diff.
- if not optionflags & (REPORT_UDIFF |
- REPORT_CDIFF |
- REPORT_NDIFF):
- return False
-
- # If expected output uses ellipsis, a meaningful fancy diff is
- # too hard ... or maybe not. In two real-life failures Tim saw,
- # a diff was a major help anyway, so this is commented out.
- # [todo] _ellipsis_match() knows which pieces do and don't match,
- # and could be the basis for a kick-ass diff in this case.
- ##if optionflags & ELLIPSIS and ELLIPSIS_MARKER in want:
- ## return False
-
- # ndiff does intraline difference marking, so can be useful even
- # for 1-line differences.
- if optionflags & REPORT_NDIFF:
- return True
-
- # The other diff types need at least a few lines to be helpful.
- return want.count('\n') > 2 and got.count('\n') > 2
-
- def output_difference(self, example, got, optionflags):
- """
- Return a string describing the differences between the
- expected output for a given example (`example`) and the actual
- output (`got`). `optionflags` is the set of option flags used
- to compare `want` and `got`.
- """
- want = example.want
- # If <BLANKLINE>s are being used, then replace blank lines
- # with <BLANKLINE> in the actual output string.
- if not (optionflags & DONT_ACCEPT_BLANKLINE):
- got = re.sub('(?m)^[ ]*(?=\n)', BLANKLINE_MARKER, got)
-
- # Check if we should use diff.
- if self._do_a_fancy_diff(want, got, optionflags):
- # Split want & got into lines.
- want_lines = want.splitlines(True) # True == keep line ends
- got_lines = got.splitlines(True)
- # Use difflib to find their differences.
- if optionflags & REPORT_UDIFF:
- diff = difflib.unified_diff(want_lines, got_lines, n=2)
- diff = list(diff)[2:] # strip the diff header
- kind = 'unified diff with -expected +actual'
- elif optionflags & REPORT_CDIFF:
- diff = difflib.context_diff(want_lines, got_lines, n=2)
- diff = list(diff)[2:] # strip the diff header
- kind = 'context diff with expected followed by actual'
- elif optionflags & REPORT_NDIFF:
- engine = difflib.Differ(charjunk=difflib.IS_CHARACTER_JUNK)
- diff = list(engine.compare(want_lines, got_lines))
- kind = 'ndiff with -expected +actual'
- else:
- assert 0, 'Bad diff option'
- # Remove trailing whitespace on diff output.
- diff = [line.rstrip() + '\n' for line in diff]
- return 'Differences (%s):\n' % kind + _indent(''.join(diff))
-
- # If we're not using diff, then simply list the expected
- # output followed by the actual output.
- if want and got:
- return 'Expected:\n%sGot:\n%s' % (_indent(want), _indent(got))
- elif want:
- return 'Expected:\n%sGot nothing\n' % _indent(want)
- elif got:
- return 'Expected nothing\nGot:\n%s' % _indent(got)
- else:
- return 'Expected nothing\nGot nothing\n'
-
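# A short sketch of the flag handling in check_output above: the exact
# comparison fails, but NORMALIZE_WHITESPACE collapses the runs of spaces.
import doctest

checker = doctest.OutputChecker()
print checker.check_output("1  2\n", "1 2\n", 0)                             # False
print checker.check_output("1  2\n", "1 2\n", doctest.NORMALIZE_WHITESPACE)  # True
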
-class DocTestFailure(Exception):
- """A DocTest example has failed in debugging mode.
-
- The exception instance has variables:
-
- - test: the DocTest object being run
-
-    - example: the Example object that failed
-
- - got: the actual output
- """
- def __init__(self, test, example, got):
- self.test = test
- self.example = example
- self.got = got
-
- def __str__(self):
- return str(self.test)
-
-class UnexpectedException(Exception):
- """A DocTest example has encountered an unexpected exception
-
- The exception instance has variables:
-
- - test: the DocTest object being run
-
-    - example: the Example object that failed
-
- - exc_info: the exception info
- """
- def __init__(self, test, example, exc_info):
- self.test = test
- self.example = example
- self.exc_info = exc_info
-
- def __str__(self):
- return str(self.test)
-
-class DebugRunner(DocTestRunner):
- r"""Run doc tests but raise an exception as soon as there is a failure.
-
- If an unexpected exception occurs, an UnexpectedException is raised.
- It contains the test, the example, and the original exception:
-
- >>> runner = DebugRunner(verbose=False)
- >>> test = DocTestParser().get_doctest('>>> raise KeyError\n42',
- ... {}, 'foo', 'foo.py', 0)
- >>> try:
- ... runner.run(test)
- ... except UnexpectedException, failure:
- ... pass
-
- >>> failure.test is test
- True
-
- >>> failure.example.want
- '42\n'
-
- >>> exc_info = failure.exc_info
- >>> raise exc_info[0], exc_info[1], exc_info[2]
- Traceback (most recent call last):
- ...
- KeyError
-
- We wrap the original exception to give the calling application
- access to the test and example information.
-
- If the output doesn't match, then a DocTestFailure is raised:
-
- >>> test = DocTestParser().get_doctest('''
- ... >>> x = 1
- ... >>> x
- ... 2
- ... ''', {}, 'foo', 'foo.py', 0)
-
- >>> try:
- ... runner.run(test)
- ... except DocTestFailure, failure:
- ... pass
-
- DocTestFailure objects provide access to the test:
-
- >>> failure.test is test
- True
-
- As well as to the example:
-
- >>> failure.example.want
- '2\n'
-
- and the actual output:
-
- >>> failure.got
- '1\n'
-
- If a failure or error occurs, the globals are left intact:
-
- >>> del test.globs['__builtins__']
- >>> test.globs
- {'x': 1}
-
- >>> test = DocTestParser().get_doctest('''
- ... >>> x = 2
- ... >>> raise KeyError
- ... ''', {}, 'foo', 'foo.py', 0)
-
- >>> runner.run(test)
- Traceback (most recent call last):
- ...
- UnexpectedException: <DocTest foo from foo.py:0 (2 examples)>
-
- >>> del test.globs['__builtins__']
- >>> test.globs
- {'x': 2}
-
- But the globals are cleared if there is no error:
-
- >>> test = DocTestParser().get_doctest('''
- ... >>> x = 2
- ... ''', {}, 'foo', 'foo.py', 0)
-
- >>> runner.run(test)
- (0, 1)
-
- >>> test.globs
- {}
-
- """
-
- def run(self, test, compileflags=None, out=None, clear_globs=True):
- r = DocTestRunner.run(self, test, compileflags, out, False)
- if clear_globs:
- test.globs.clear()
- return r
-
- def report_unexpected_exception(self, out, test, example, exc_info):
- raise UnexpectedException(test, example, exc_info)
-
- def report_failure(self, out, test, example, got):
- raise DocTestFailure(test, example, got)
-
-######################################################################
-## 6. Test Functions
-######################################################################
-# These should be backwards compatible.
-
-# For backward compatibility, a global instance of a DocTestRunner
-# class, updated by testmod.
-master = None
-
-def testmod(m=None, name=None, globs=None, verbose=None, isprivate=None,
- report=True, optionflags=0, extraglobs=None,
- raise_on_error=False, exclude_empty=False):
- """m=None, name=None, globs=None, verbose=None, isprivate=None,
- report=True, optionflags=0, extraglobs=None, raise_on_error=False,
- exclude_empty=False
-
- Test examples in docstrings in functions and classes reachable
- from module m (or the current module if m is not supplied), starting
- with m.__doc__. Unless isprivate is specified, private names
- are not skipped.
-
- Also test examples reachable from dict m.__test__ if it exists and is
- not None. m.__test__ maps names to functions, classes and strings;
- function and class docstrings are tested even if the name is private;
- strings are tested directly, as if they were docstrings.
-
- Return (#failures, #tests).
-
- See doctest.__doc__ for an overview.
-
- Optional keyword arg "name" gives the name of the module; by default
- use m.__name__.
-
- Optional keyword arg "globs" gives a dict to be used as the globals
- when executing examples; by default, use m.__dict__. A copy of this
- dict is actually used for each docstring, so that each docstring's
- examples start with a clean slate.
-
- Optional keyword arg "extraglobs" gives a dictionary that should be
- merged into the globals that are used to execute examples. By
- default, no extra globals are used. This is new in 2.4.
-
- Optional keyword arg "verbose" prints lots of stuff if true, prints
- only failures if false; by default, it's true iff "-v" is in sys.argv.
-
- Optional keyword arg "report" prints a summary at the end when true,
- else prints nothing at the end. In verbose mode, the summary is
- detailed, else very brief (in fact, empty if all tests passed).
-
- Optional keyword arg "optionflags" or's together module constants,
- and defaults to 0. This is new in 2.3. Possible values (see the
- docs for details):
-
- DONT_ACCEPT_TRUE_FOR_1
- DONT_ACCEPT_BLANKLINE
- NORMALIZE_WHITESPACE
- ELLIPSIS
- IGNORE_EXCEPTION_DETAIL
- REPORT_UDIFF
- REPORT_CDIFF
- REPORT_NDIFF
- REPORT_ONLY_FIRST_FAILURE
-
- Optional keyword arg "raise_on_error" raises an exception on the
- first unexpected exception or failure. This allows failures to be
- post-mortem debugged.
-
- Deprecated in Python 2.4:
- Optional keyword arg "isprivate" specifies a function used to
- determine whether a name is private. The default function is
-        to treat all functions as public.  Optionally, "isprivate" can be
- set to doctest.is_private to skip over functions marked as private
- using the underscore naming convention; see its docs for details.
-
- Advanced tomfoolery: testmod runs methods of a local instance of
- class doctest.Tester, then merges the results into (or creates)
- global Tester instance doctest.master. Methods of doctest.master
- can be called directly too, if you want to do something unusual.
- Passing report=0 to testmod is especially useful then, to delay
- displaying a summary. Invoke doctest.master.summarize(verbose)
- when you're done fiddling.
- """
- global master
-
- if isprivate is not None:
- warnings.warn("the isprivate argument is deprecated; "
- "examine DocTestFinder.find() lists instead",
- DeprecationWarning)
-
- # If no module was given, then use __main__.
- if m is None:
- # DWA - m will still be None if this wasn't invoked from the command
- # line, in which case the following TypeError is about as good an error
- # as we should expect
- m = sys.modules.get('__main__')
-
- # Check that we were actually given a module.
- if not inspect.ismodule(m):
- raise TypeError("testmod: module required; %r" % (m,))
-
- # If no name was given, then use the module's name.
- if name is None:
- name = m.__name__
-
- # Find, parse, and run all tests in the given module.
- finder = DocTestFinder(_namefilter=isprivate, exclude_empty=exclude_empty)
-
- if raise_on_error:
- runner = DebugRunner(verbose=verbose, optionflags=optionflags)
- else:
- runner = DocTestRunner(verbose=verbose, optionflags=optionflags)
-
- for test in finder.find(m, name, globs=globs, extraglobs=extraglobs):
- runner.run(test)
-
- if report:
- runner.summarize()
-
- if master is None:
- master = runner
- else:
- master.merge(runner)
-
- return runner.failures, runner.tries
-
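# A minimal usage sketch for testmod(), run from a script whose own
# docstrings contain the examples:
import doctest

failures, tries = doctest.testmod(verbose=False)
print "%d of %d examples failed" % (failures, tries)
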
-def testfile(filename, module_relative=True, name=None, package=None,
- globs=None, verbose=None, report=True, optionflags=0,
- extraglobs=None, raise_on_error=False, parser=DocTestParser()):
- """
- Test examples in the given file. Return (#failures, #tests).
-
- Optional keyword arg "module_relative" specifies how filenames
- should be interpreted:
-
- - If "module_relative" is True (the default), then "filename"
- specifies a module-relative path. By default, this path is
- relative to the calling module's directory; but if the
- "package" argument is specified, then it is relative to that
- package. To ensure os-independence, "filename" should use
- "/" characters to separate path segments, and should not
- be an absolute path (i.e., it may not begin with "/").
-
- - If "module_relative" is False, then "filename" specifies an
- os-specific path. The path may be absolute or relative (to
- the current working directory).
-
- Optional keyword arg "name" gives the name of the test; by default
- use the file's basename.
-
- Optional keyword argument "package" is a Python package or the
- name of a Python package whose directory should be used as the
- base directory for a module relative filename. If no package is
- specified, then the calling module's directory is used as the base
- directory for module relative filenames. It is an error to
- specify "package" if "module_relative" is False.
-
- Optional keyword arg "globs" gives a dict to be used as the globals
- when executing examples; by default, use {}. A copy of this dict
- is actually used for each docstring, so that each docstring's
- examples start with a clean slate.
-
- Optional keyword arg "extraglobs" gives a dictionary that should be
- merged into the globals that are used to execute examples. By
- default, no extra globals are used.
-
- Optional keyword arg "verbose" prints lots of stuff if true, prints
- only failures if false; by default, it's true iff "-v" is in sys.argv.
-
- Optional keyword arg "report" prints a summary at the end when true,
- else prints nothing at the end. In verbose mode, the summary is
- detailed, else very brief (in fact, empty if all tests passed).
-
- Optional keyword arg "optionflags" or's together module constants,
- and defaults to 0. Possible values (see the docs for details):
-
- DONT_ACCEPT_TRUE_FOR_1
- DONT_ACCEPT_BLANKLINE
- NORMALIZE_WHITESPACE
- ELLIPSIS
- IGNORE_EXCEPTION_DETAIL
- REPORT_UDIFF
- REPORT_CDIFF
- REPORT_NDIFF
- REPORT_ONLY_FIRST_FAILURE
-
- Optional keyword arg "raise_on_error" raises an exception on the
- first unexpected exception or failure. This allows failures to be
- post-mortem debugged.
-
- Optional keyword arg "parser" specifies a DocTestParser (or
- subclass) that should be used to extract tests from the files.
-
-    Advanced tomfoolery: testfile runs methods of a local instance of
-    class doctest.Tester, then merges the results into (or creates)
-    global Tester instance doctest.master.  Methods of doctest.master
-    can be called directly too, if you want to do something unusual.
-    Passing report=0 to testfile is especially useful then, to delay
- displaying a summary. Invoke doctest.master.summarize(verbose)
- when you're done fiddling.
- """
- global master
-
- if package and not module_relative:
- raise ValueError("Package may only be specified for module-"
- "relative paths.")
-
- # Relativize the path
- if module_relative:
- package = _normalize_module(package)
- filename = _module_relative_path(package, filename)
-
- # If no name was given, then use the file's name.
- if name is None:
- name = os.path.basename(filename)
-
- # Assemble the globals.
- if globs is None:
- globs = {}
- else:
- globs = globs.copy()
- if extraglobs is not None:
- globs.update(extraglobs)
-
- if raise_on_error:
- runner = DebugRunner(verbose=verbose, optionflags=optionflags)
- else:
- runner = DocTestRunner(verbose=verbose, optionflags=optionflags)
-
- # Read the file, convert it to a test, and run it.
- s = open(filename).read()
- test = parser.get_doctest(s, globs, name, filename, 0)
- runner.run(test)
-
- if report:
- runner.summarize()
-
- if master is None:
- master = runner
- else:
- master.merge(runner)
-
- return runner.failures, runner.tries
-
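# A matching sketch for testfile(); "example.txt" is a hypothetical doctest
# file resolved relative to the calling module's directory:
import doctest

failures, tries = doctest.testfile("example.txt",
                                   optionflags=doctest.ELLIPSIS)
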
-def run_docstring_examples(f, globs, verbose=False, name="NoName",
- compileflags=None, optionflags=0):
- """
- Test examples in the given object's docstring (`f`), using `globs`
- as globals. Optional argument `name` is used in failure messages.
- If the optional argument `verbose` is true, then generate output
- even if there are no failures.
-
- `compileflags` gives the set of flags that should be used by the
- Python compiler when running the examples. If not specified, then
- it will default to the set of future-import flags that apply to
- `globs`.
-
- Optional keyword arg `optionflags` specifies options for the
- testing and output. See the documentation for `testmod` for more
- information.
- """
- # Find, parse, and run all tests in the given module.
- finder = DocTestFinder(verbose=verbose, recurse=False)
- runner = DocTestRunner(verbose=verbose, optionflags=optionflags)
- for test in finder.find(f, name, globs=globs):
- runner.run(test, compileflags=compileflags)
-
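# Sketch: check a single object's docstring without scanning a whole module;
# "average" is a made-up function defined just for the illustration.
import doctest

def average(values):
    """
    >>> average([1, 2, 3])
    2.0
    """
    return sum(values) / float(len(values))

doctest.run_docstring_examples(average, {'average': average},
                               name='average')
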
-######################################################################
-## 7. Tester
-######################################################################
-# This is provided only for backwards compatibility. It's not
-# actually used in any way.
-
-class Tester:
- def __init__(self, mod=None, globs=None, verbose=None,
- isprivate=None, optionflags=0):
-
- warnings.warn("class Tester is deprecated; "
- "use class doctest.DocTestRunner instead",
- DeprecationWarning, stacklevel=2)
- if mod is None and globs is None:
- raise TypeError("Tester.__init__: must specify mod or globs")
- if mod is not None and not inspect.ismodule(mod):
- raise TypeError("Tester.__init__: mod must be a module; %r" %
- (mod,))
- if globs is None:
- globs = mod.__dict__
- self.globs = globs
-
- self.verbose = verbose
- self.isprivate = isprivate
- self.optionflags = optionflags
- self.testfinder = DocTestFinder(_namefilter=isprivate)
- self.testrunner = DocTestRunner(verbose=verbose,
- optionflags=optionflags)
-
- def runstring(self, s, name):
- test = DocTestParser().get_doctest(s, self.globs, name, None, None)
- if self.verbose:
- print "Running string", name
- (f,t) = self.testrunner.run(test)
- if self.verbose:
- print f, "of", t, "examples failed in string", name
- return (f,t)
-
- def rundoc(self, object, name=None, module=None):
- f = t = 0
- tests = self.testfinder.find(object, name, module=module,
- globs=self.globs)
- for test in tests:
- (f2, t2) = self.testrunner.run(test)
- (f,t) = (f+f2, t+t2)
- return (f,t)
-
- def rundict(self, d, name, module=None):
- import types
- m = types.ModuleType(name)
- m.__dict__.update(d)
- if module is None:
- module = False
- return self.rundoc(m, name, module)
-
- def run__test__(self, d, name):
- import types
- m = types.ModuleType(name)
- m.__test__ = d
- return self.rundoc(m, name)
-
- def summarize(self, verbose=None):
- return self.testrunner.summarize(verbose)
-
- def merge(self, other):
- self.testrunner.merge(other.testrunner)
-
-######################################################################
-## 8. Unittest Support
-######################################################################
-
-_unittest_reportflags = 0
-
-def set_unittest_reportflags(flags):
- """Sets the unittest option flags.
-
- The old flag is returned so that a runner could restore the old
- value if it wished to:
-
- >>> old = _unittest_reportflags
- >>> set_unittest_reportflags(REPORT_NDIFF |
- ... REPORT_ONLY_FIRST_FAILURE) == old
- True
-
- >>> import doctest
- >>> doctest._unittest_reportflags == (REPORT_NDIFF |
- ... REPORT_ONLY_FIRST_FAILURE)
- True
-
- Only reporting flags can be set:
-
- >>> set_unittest_reportflags(ELLIPSIS)
- Traceback (most recent call last):
- ...
- ValueError: ('Only reporting flags allowed', 8)
-
- >>> set_unittest_reportflags(old) == (REPORT_NDIFF |
- ... REPORT_ONLY_FIRST_FAILURE)
- True
- """
- global _unittest_reportflags
-
- if (flags & REPORTING_FLAGS) != flags:
- raise ValueError("Only reporting flags allowed", flags)
- old = _unittest_reportflags
- _unittest_reportflags = flags
- return old
-
-
-class DocTestCase(unittest.TestCase):
-
- def __init__(self, test, optionflags=0, setUp=None, tearDown=None,
- checker=None):
-
- unittest.TestCase.__init__(self)
- self._dt_optionflags = optionflags
- self._dt_checker = checker
- self._dt_test = test
- self._dt_setUp = setUp
- self._dt_tearDown = tearDown
-
- def setUp(self):
- test = self._dt_test
-
- if self._dt_setUp is not None:
- self._dt_setUp(test)
-
- def tearDown(self):
- test = self._dt_test
-
- if self._dt_tearDown is not None:
- self._dt_tearDown(test)
-
- test.globs.clear()
-
- def runTest(self):
- test = self._dt_test
- old = sys.stdout
- new = StringIO()
- optionflags = self._dt_optionflags
-
- if not (optionflags & REPORTING_FLAGS):
- # The option flags don't include any reporting flags,
- # so add the default reporting flags
- optionflags |= _unittest_reportflags
-
- runner = DocTestRunner(optionflags=optionflags,
- checker=self._dt_checker, verbose=False)
-
- try:
- runner.DIVIDER = "-"*70
- failures, tries = runner.run(
- test, out=new.write, clear_globs=False)
- finally:
- sys.stdout = old
-
- if failures:
- raise self.failureException(self.format_failure(new.getvalue()))
-
- def format_failure(self, err):
- test = self._dt_test
- if test.lineno is None:
- lineno = 'unknown line number'
- else:
- lineno = '%s' % test.lineno
- lname = '.'.join(test.name.split('.')[-1:])
- return ('Failed doctest test for %s\n'
- ' File "%s", line %s, in %s\n\n%s'
- % (test.name, test.filename, lineno, lname, err)
- )
-
- def debug(self):
- r"""Run the test case without results and without catching exceptions
-
- The unit test framework includes a debug method on test cases
- and test suites to support post-mortem debugging. The test code
- is run in such a way that errors are not caught. This way a
- caller can catch the errors and initiate post-mortem debugging.
-
- The DocTestCase provides a debug method that raises
-        UnexpectedException errors if there is an unexpected
- exception:
-
- >>> test = DocTestParser().get_doctest('>>> raise KeyError\n42',
- ... {}, 'foo', 'foo.py', 0)
- >>> case = DocTestCase(test)
- >>> try:
- ... case.debug()
- ... except UnexpectedException, failure:
- ... pass
-
- The UnexpectedException contains the test, the example, and
- the original exception:
-
- >>> failure.test is test
- True
-
- >>> failure.example.want
- '42\n'
-
- >>> exc_info = failure.exc_info
- >>> raise exc_info[0], exc_info[1], exc_info[2]
- Traceback (most recent call last):
- ...
- KeyError
-
- If the output doesn't match, then a DocTestFailure is raised:
-
- >>> test = DocTestParser().get_doctest('''
- ... >>> x = 1
- ... >>> x
- ... 2
- ... ''', {}, 'foo', 'foo.py', 0)
- >>> case = DocTestCase(test)
-
- >>> try:
- ... case.debug()
- ... except DocTestFailure, failure:
- ... pass
-
- DocTestFailure objects provide access to the test:
-
- >>> failure.test is test
- True
-
- As well as to the example:
-
- >>> failure.example.want
- '2\n'
-
- and the actual output:
-
- >>> failure.got
- '1\n'
-
- """
-
- self.setUp()
- runner = DebugRunner(optionflags=self._dt_optionflags,
- checker=self._dt_checker, verbose=False)
- runner.run(self._dt_test)
- self.tearDown()
-
- def id(self):
- return self._dt_test.name
-
- def __repr__(self):
- name = self._dt_test.name.split('.')
- return "%s (%s)" % (name[-1], '.'.join(name[:-1]))
-
- __str__ = __repr__
-
- def shortDescription(self):
- return "Doctest: " + self._dt_test.name
-
-def DocTestSuite(module=None, globs=None, extraglobs=None, test_finder=None,
- **options):
- """
- Convert doctest tests for a module to a unittest test suite.
-
- This converts each documentation string in a module that
- contains doctest tests to a unittest test case. If any of the
- tests in a doc string fail, then the test case fails. An exception
- is raised showing the name of the file containing the test and a
- (sometimes approximate) line number.
-
- The `module` argument provides the module to be tested. The argument
- can be either a module or a module name.
-
- If no argument is given, the calling module is used.
-
- A number of options may be provided as keyword arguments:
-
- setUp
- A set-up function. This is called before running the
- tests in each file. The setUp function will be passed a DocTest
- object. The setUp function can access the test globals as the
- globs attribute of the test passed.
-
- tearDown
- A tear-down function. This is called after running the
- tests in each file. The tearDown function will be passed a DocTest
- object. The tearDown function can access the test globals as the
- globs attribute of the test passed.
-
- globs
- A dictionary containing initial global variables for the tests.
-
- optionflags
- A set of doctest option flags expressed as an integer.
- """
-
- if test_finder is None:
- test_finder = DocTestFinder()
-
- module = _normalize_module(module)
- tests = test_finder.find(module, globs=globs, extraglobs=extraglobs)
- if globs is None:
- globs = module.__dict__
- if not tests:
- # Why do we want to do this? Because it reveals a bug that might
- # otherwise be hidden.
- raise ValueError(module, "has no tests")
-
- tests.sort()
- suite = unittest.TestSuite()
- for test in tests:
- if len(test.examples) == 0:
- continue
- if not test.filename:
- filename = module.__file__
- if filename[-4:] in (".pyc", ".pyo"):
- filename = filename[:-1]
- test.filename = filename
- suite.addTest(DocTestCase(test, **options))
-
- return suite
-
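# Sketch of the unittest wiring described above; "mypackage.mymodule" is a
# hypothetical dotted module name:
import unittest
import doctest

suite = doctest.DocTestSuite("mypackage.mymodule",
                             optionflags=doctest.NORMALIZE_WHITESPACE)
unittest.TextTestRunner(verbosity=2).run(suite)
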
-class DocFileCase(DocTestCase):
-
- def id(self):
- return '_'.join(self._dt_test.name.split('.'))
-
- def __repr__(self):
- return self._dt_test.filename
- __str__ = __repr__
-
- def format_failure(self, err):
- return ('Failed doctest test for %s\n File "%s", line 0\n\n%s'
- % (self._dt_test.name, self._dt_test.filename, err)
- )
-
-def DocFileTest(path, module_relative=True, package=None,
- globs=None, parser=DocTestParser(), **options):
- if globs is None:
- globs = {}
-
- if package and not module_relative:
- raise ValueError("Package may only be specified for module-"
- "relative paths.")
-
- # Relativize the path.
- if module_relative:
- package = _normalize_module(package)
- path = _module_relative_path(package, path)
-
- # Find the file and read it.
- name = os.path.basename(path)
- doc = open(path).read()
-
- # Convert it to a test, and wrap it in a DocFileCase.
- test = parser.get_doctest(doc, globs, name, path, 0)
- return DocFileCase(test, **options)
-
-def DocFileSuite(*paths, **kw):
- """A unittest suite for one or more doctest files.
-
- The path to each doctest file is given as a string; the
- interpretation of that string depends on the keyword argument
- "module_relative".
-
- A number of options may be provided as keyword arguments:
-
- module_relative
- If "module_relative" is True, then the given file paths are
- interpreted as os-independent module-relative paths. By
- default, these paths are relative to the calling module's
- directory; but if the "package" argument is specified, then
- they are relative to that package. To ensure os-independence,
- "filename" should use "/" characters to separate path
- segments, and may not be an absolute path (i.e., it may not
- begin with "/").
-
- If "module_relative" is False, then the given file paths are
- interpreted as os-specific paths. These paths may be absolute
- or relative (to the current working directory).
-
- package
- A Python package or the name of a Python package whose directory
- should be used as the base directory for module relative paths.
- If "package" is not specified, then the calling module's
- directory is used as the base directory for module relative
- filenames. It is an error to specify "package" if
- "module_relative" is False.
-
- setUp
- A set-up function. This is called before running the
- tests in each file. The setUp function will be passed a DocTest
- object. The setUp function can access the test globals as the
- globs attribute of the test passed.
-
- tearDown
- A tear-down function. This is called after running the
- tests in each file. The tearDown function will be passed a DocTest
- object. The tearDown function can access the test globals as the
- globs attribute of the test passed.
-
- globs
- A dictionary containing initial global variables for the tests.
-
- optionflags
- A set of doctest option flags expressed as an integer.
-
- parser
- A DocTestParser (or subclass) that should be used to extract
- tests from the files.
- """
- suite = unittest.TestSuite()
-
- # We do this here so that _normalize_module is called at the right
- # level. If it were called in DocFileTest, then this function
- # would be the caller and we might guess the package incorrectly.
- if kw.get('module_relative', True):
- kw['package'] = _normalize_module(kw.get('package'))
-
- for path in paths:
- suite.addTest(DocFileTest(path, **kw))
-
- return suite
-
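# Companion sketch for DocFileSuite; the .txt paths are hypothetical
# module-relative doctest files:
import unittest
import doctest

suite = doctest.DocFileSuite("docs/usage.txt", "docs/api.txt",
                             optionflags=doctest.ELLIPSIS)
unittest.TextTestRunner().run(suite)
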
-######################################################################
-## 9. Debugging Support
-######################################################################
-
-def script_from_examples(s):
- r"""Extract script from text with examples.
-
- Converts text with examples to a Python script. Example input is
- converted to regular code. Example output and all other words
- are converted to comments:
-
- >>> text = '''
- ... Here are examples of simple math.
- ...
- ... Python has super accurate integer addition
- ...
- ... >>> 2 + 2
- ... 5
- ...
- ... And very friendly error messages:
- ...
- ... >>> 1/0
- ... To Infinity
- ... And
- ... Beyond
- ...
- ... You can use logic if you want:
- ...
- ... >>> if 0:
- ... ... blah
- ... ... blah
- ... ...
- ...
- ... Ho hum
- ... '''
-
- >>> print script_from_examples(text)
- # Here are examples of simple math.
- #
- # Python has super accurate integer addition
- #
- 2 + 2
- # Expected:
- ## 5
- #
- # And very friendly error messages:
- #
- 1/0
- # Expected:
- ## To Infinity
- ## And
- ## Beyond
- #
- # You can use logic if you want:
- #
- if 0:
- blah
- blah
- #
- # Ho hum
- """
- output = []
- for piece in DocTestParser().parse(s):
- if isinstance(piece, Example):
- # Add the example's source code (strip trailing NL)
- output.append(piece.source[:-1])
- # Add the expected output:
- want = piece.want
- if want:
- output.append('# Expected:')
- output += ['## '+l for l in want.split('\n')[:-1]]
- else:
- # Add non-example text.
- output += [_comment_line(l)
- for l in piece.split('\n')[:-1]]
-
- # Trim junk on both ends.
- while output and output[-1] == '#':
- output.pop()
- while output and output[0] == '#':
- output.pop(0)
- # Combine the output, and return it.
- return '\n'.join(output)
-
-def testsource(module, name):
- """Extract the test sources from a doctest docstring as a script.
-
- Provide the module (or dotted name of the module) containing the
- test to be debugged and the name (within the module) of the object
- with the doc string with tests to be debugged.
- """
- module = _normalize_module(module)
- tests = DocTestFinder().find(module)
- test = [t for t in tests if t.name == name]
- if not test:
- raise ValueError(name, "not found in tests")
- test = test[0]
- testsrc = script_from_examples(test.docstring)
- return testsrc
-
-def debug_src(src, pm=False, globs=None):
-    """Debug a single doctest docstring, in argument `src`."""
- testsrc = script_from_examples(src)
- debug_script(testsrc, pm, globs)
-
-def debug_script(src, pm=False, globs=None):
- "Debug a test script. `src` is the script, as a string."
- import pdb
-
-    # Note that tempfile.NamedTemporaryFile() cannot be used.  As the
- # docs say, a file so created cannot be opened by name a second time
- # on modern Windows boxes, and execfile() needs to open it.
- srcfilename = tempfile.mktemp(".py", "doctestdebug")
- f = open(srcfilename, 'w')
- f.write(src)
- f.close()
-
- try:
- if globs:
- globs = globs.copy()
- else:
- globs = {}
-
- if pm:
- try:
- execfile(srcfilename, globs, globs)
- except:
- print sys.exc_info()[1]
- pdb.post_mortem(sys.exc_info()[2])
- else:
- # Note that %r is vital here. '%s' instead can, e.g., cause
- # backslashes to get treated as metacharacters on Windows.
- pdb.run("execfile(%r)" % srcfilename, globs, globs)
-
- finally:
- os.remove(srcfilename)
-
-def debug(module, name, pm=False):
- """Debug a single doctest docstring.
-
- Provide the module (or dotted name of the module) containing the
- test to be debugged and the name (within the module) of the object
- with the docstring with tests to be debugged.
- """
- module = _normalize_module(module)
- testsrc = testsource(module, name)
- debug_script(testsrc, pm, module.__dict__)
-
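# Debugging sketch tying the helpers above together; the module and test
# names are hypothetical.  debug() extracts the named docstring's examples
# with testsource() and runs the resulting script under pdb: pm=False steps
# it from the top, while pm=True runs it and drops into post-mortem if it
# raises.
import doctest

doctest.debug("mymodule", "mymodule.MyClass.method", pm=True)
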
-######################################################################
-## 10. Example Usage
-######################################################################
-class _TestClass:
- """
- A pointless class, for sanity-checking of docstring testing.
-
- Methods:
- square()
- get()
-
- >>> _TestClass(13).get() + _TestClass(-12).get()
- 1
- >>> hex(_TestClass(13).square().get())
- '0xa9'
- """
-
- def __init__(self, val):
- """val -> _TestClass object with associated value val.
-
- >>> t = _TestClass(123)
- >>> print t.get()
- 123
- """
-
- self.val = val
-
- def square(self):
- """square() -> square TestClass's associated value
-
- >>> _TestClass(13).square().get()
- 169
- """
-
- self.val = self.val ** 2
- return self
-
- def get(self):
- """get() -> return TestClass's associated value.
-
- >>> x = _TestClass(-42)
- >>> print x.get()
- -42
- """
-
- return self.val
-
-__test__ = {"_TestClass": _TestClass,
- "string": r"""
- Example of a string object, searched as-is.
- >>> x = 1; y = 2
- >>> x + y, x * y
- (3, 2)
- """,
-
- "bool-int equivalence": r"""
- In 2.2, boolean expressions displayed
- 0 or 1. By default, we still accept
- them. This can be disabled by passing
- DONT_ACCEPT_TRUE_FOR_1 to the new
- optionflags argument.
- >>> 4 == 4
- 1
- >>> 4 == 4
- True
- >>> 4 > 4
- 0
- >>> 4 > 4
- False
- """,
-
- "blank lines": r"""
- Blank lines can be marked with <BLANKLINE>:
- >>> print 'foo\n\nbar\n'
- foo
- <BLANKLINE>
- bar
- <BLANKLINE>
- """,
-
- "ellipsis": r"""
- If the ellipsis flag is used, then '...' can be used to
- elide substrings in the desired output:
- >>> print range(1000) #doctest: +ELLIPSIS
- [0, 1, 2, ..., 999]
- """,
-
- "whitespace normalization": r"""
- If the whitespace normalization flag is used, then
- differences in whitespace are ignored.
- >>> print range(30) #doctest: +NORMALIZE_WHITESPACE
- [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14,
- 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26,
- 27, 28, 29]
- """,
- }
-
-def _test():
- r = unittest.TextTestRunner()
- r.run(DocTestSuite())
-
-if __name__ == "__main__":
- _test()
-
diff --git a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/tests/server.py b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/tests/server.py
deleted file mode 100755
index f4aaaa1c..00000000
--- a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/tests/server.py
+++ /dev/null
@@ -1,48 +0,0 @@
-"""Basic http server for tests to simulate PyPI or custom indexes
-"""
-import urllib2
-import sys
-from threading import Thread
-from BaseHTTPServer import HTTPServer
-from SimpleHTTPServer import SimpleHTTPRequestHandler
-
-class IndexServer(HTTPServer):
- """Basic single-threaded http server simulating a package index
-
- You can use this server in unittest like this::
- s = IndexServer()
- s.start()
- index_url = s.base_url() + 'mytestindex'
- # do some test requests to the index
- # The index files should be located in setuptools/tests/indexes
- s.stop()
- """
- def __init__(self):
- HTTPServer.__init__(self, ('', 0), SimpleHTTPRequestHandler)
- self._run = True
-
- def serve(self):
- while True:
- self.handle_request()
- if not self._run: break
-
- def start(self):
- self.thread = Thread(target=self.serve)
- self.thread.start()
-
- def stop(self):
- """self.shutdown is not supported on python < 2.6"""
- self._run = False
- try:
- if sys.version > '2.6':
- urllib2.urlopen('http://127.0.0.1:%s/' % self.server_port,
- None, 5)
- else:
- urllib2.urlopen('http://127.0.0.1:%s/' % self.server_port)
- except urllib2.URLError:
- pass
- self.thread.join()
-
- def base_url(self):
- port = self.server_port
- return 'http://127.0.0.1:%s/setuptools/tests/indexes/' % port
diff --git a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/tests/test_build_ext.py b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/tests/test_build_ext.py
deleted file mode 100755
index a520ced9..00000000
--- a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/tests/test_build_ext.py
+++ /dev/null
@@ -1,20 +0,0 @@
-"""build_ext tests
-"""
-import os, shutil, tempfile, unittest
-from distutils.command.build_ext import build_ext as distutils_build_ext
-from setuptools.command.build_ext import build_ext
-from setuptools.dist import Distribution
-
-class TestBuildExtTest(unittest.TestCase):
-
- def test_get_ext_filename(self):
- # setuptools needs to give back the same
-        # result as distutils, even if the fullname
- # is not in ext_map
- dist = Distribution()
- cmd = build_ext(dist)
- cmd.ext_map['foo/bar'] = ''
- res = cmd.get_ext_filename('foo')
- wanted = distutils_build_ext.get_ext_filename(cmd, 'foo')
- assert res == wanted
-
diff --git a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/tests/test_develop.py b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/tests/test_develop.py
deleted file mode 100755
index a567dd5a..00000000
--- a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/tests/test_develop.py
+++ /dev/null
@@ -1,82 +0,0 @@
-"""develop tests
-"""
-import sys
-import os, shutil, tempfile, unittest
-import tempfile
-import site
-from StringIO import StringIO
-
-from distutils.errors import DistutilsError
-from setuptools.command.develop import develop
-from setuptools.command import easy_install as easy_install_pkg
-from setuptools.dist import Distribution
-
-SETUP_PY = """\
-from setuptools import setup
-
-setup(name='foo')
-"""
-
-class TestDevelopTest(unittest.TestCase):
-
- def setUp(self):
- self.dir = tempfile.mkdtemp()
- setup = os.path.join(self.dir, 'setup.py')
- f = open(setup, 'w')
- f.write(SETUP_PY)
- f.close()
- self.old_cwd = os.getcwd()
- os.chdir(self.dir)
- if sys.version >= "2.6":
- self.old_base = site.USER_BASE
- site.USER_BASE = tempfile.mkdtemp()
- self.old_site = site.USER_SITE
- site.USER_SITE = tempfile.mkdtemp()
-
- def tearDown(self):
- os.chdir(self.old_cwd)
- shutil.rmtree(self.dir)
- if sys.version >= "2.6":
- shutil.rmtree(site.USER_BASE)
- shutil.rmtree(site.USER_SITE)
- site.USER_BASE = self.old_base
- site.USER_SITE = self.old_site
-
- def test_develop(self):
- if sys.version < "2.6":
- return
- dist = Distribution()
- dist.script_name = 'setup.py'
- cmd = develop(dist)
- cmd.user = 1
- cmd.ensure_finalized()
- cmd.install_dir = site.USER_SITE
- cmd.user = 1
- old_stdout = sys.stdout
- sys.stdout = StringIO()
- try:
- cmd.run()
- finally:
- sys.stdout = old_stdout
-
- # let's see if we got our egg link at the right place
- content = os.listdir(site.USER_SITE)
- content.sort()
- self.assertEquals(content, ['UNKNOWN.egg-link', 'easy-install.pth'])
-
- def test_develop_with_setup_requires(self):
-
- wanted = ("Could not find suitable distribution for "
- "Requirement.parse('I-DONT-EXIST')")
- old_dir = os.getcwd()
- os.chdir(self.dir)
- try:
- try:
- dist = Distribution({'setup_requires': ['I_DONT_EXIST']})
- except DistutilsError, e:
- error = str(e)
- if error == wanted:
- pass
- finally:
- os.chdir(old_dir)
-
diff --git a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/tests/test_easy_install.py b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/tests/test_easy_install.py
deleted file mode 100755
index 85616605..00000000
--- a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/tests/test_easy_install.py
+++ /dev/null
@@ -1,243 +0,0 @@
-"""Easy install Tests
-"""
-import sys
-import os, shutil, tempfile, unittest
-import site
-from StringIO import StringIO
-from setuptools.command.easy_install import easy_install, get_script_args, main
-from setuptools.command.easy_install import PthDistributions
-from setuptools.command import easy_install as easy_install_pkg
-from setuptools.dist import Distribution
-from pkg_resources import Distribution as PRDistribution
-
-try:
- import multiprocessing
- import logging
- _LOG = logging.getLogger('test_easy_install')
- logging.basicConfig(level=logging.INFO, stream=sys.stderr)
- _MULTIPROC = True
-except ImportError:
- _MULTIPROC = False
- _LOG = None
-
-class FakeDist(object):
- def get_entry_map(self, group):
- if group != 'console_scripts':
- return {}
- return {'name': 'ep'}
-
- def as_requirement(self):
- return 'spec'
-
-WANTED = """\
-#!%s
-# EASY-INSTALL-ENTRY-SCRIPT: 'spec','console_scripts','name'
-__requires__ = 'spec'
-import sys
-from pkg_resources import load_entry_point
-
-if __name__ == '__main__':
- sys.exit(
- load_entry_point('spec', 'console_scripts', 'name')()
- )
-""" % sys.executable
-
-SETUP_PY = """\
-from setuptools import setup
-
-setup(name='foo')
-"""
-
-class TestEasyInstallTest(unittest.TestCase):
-
- def test_install_site_py(self):
- dist = Distribution()
- cmd = easy_install(dist)
- cmd.sitepy_installed = False
- cmd.install_dir = tempfile.mkdtemp()
- try:
- cmd.install_site_py()
- sitepy = os.path.join(cmd.install_dir, 'site.py')
- self.assert_(os.path.exists(sitepy))
- finally:
- shutil.rmtree(cmd.install_dir)
-
- def test_get_script_args(self):
- dist = FakeDist()
-
- old_platform = sys.platform
- try:
- name, script = get_script_args(dist).next()
- finally:
- sys.platform = old_platform
-
- self.assertEquals(script, WANTED)
-
- def test_no_setup_cfg(self):
- # makes sure easy_install as a command (main)
- # doesn't use a setup.cfg file that is located
- # in the current working directory
- dir = tempfile.mkdtemp()
- setup_cfg = open(os.path.join(dir, 'setup.cfg'), 'w')
- setup_cfg.write('[easy_install]\nfind_links = http://example.com')
- setup_cfg.close()
- setup_py = open(os.path.join(dir, 'setup.py'), 'w')
- setup_py.write(SETUP_PY)
- setup_py.close()
-
- from setuptools.dist import Distribution
-
- def _parse_command_line(self):
- msg = 'Error: a local setup.cfg was used'
- opts = self.command_options
- if 'easy_install' in opts:
- assert 'find_links' not in opts['easy_install'], msg
- return self._old_parse_command_line()
-
- Distribution._old_parse_command_line = Distribution.parse_command_line
- Distribution.parse_command_line = _parse_command_line
-
- old_wd = os.getcwd()
- try:
- os.chdir(dir)
- main([])
- finally:
- os.chdir(old_wd)
- shutil.rmtree(dir)
-
- def test_no_find_links(self):
- # new option '--no-find-links', that blocks find-links added at
- # the project level
- dist = Distribution()
- cmd = easy_install(dist)
- cmd.check_pth_processing = lambda : True
- cmd.no_find_links = True
- cmd.find_links = ['link1', 'link2']
- cmd.install_dir = os.path.join(tempfile.mkdtemp(), 'ok')
- cmd.args = ['ok']
- cmd.ensure_finalized()
- self.assertEquals(cmd.package_index.scanned_urls, {})
-
- # let's try without it (default behavior)
- cmd = easy_install(dist)
- cmd.check_pth_processing = lambda : True
- cmd.find_links = ['link1', 'link2']
- cmd.install_dir = os.path.join(tempfile.mkdtemp(), 'ok')
- cmd.args = ['ok']
- cmd.ensure_finalized()
- keys = cmd.package_index.scanned_urls.keys()
- keys.sort()
- self.assertEquals(keys, ['link1', 'link2'])
-
-
-class TestPTHFileWriter(unittest.TestCase):
- def test_add_from_cwd_site_sets_dirty(self):
- '''a pth file manager should be marked dirty when a
- distribution in the cwd is added, even though the cwd is a site dir
- '''
- pth = PthDistributions('does-not_exist', [os.getcwd()])
- self.assert_(not pth.dirty)
- pth.add(PRDistribution(os.getcwd()))
- self.assert_(pth.dirty)
-
- def test_add_from_site_is_ignored(self):
- pth = PthDistributions('does-not_exist', ['/test/location/does-not-have-to-exist'])
- self.assert_(not pth.dirty)
- pth.add(PRDistribution('/test/location/does-not-have-to-exist'))
- self.assert_(not pth.dirty)
-
-
-class TestUserInstallTest(unittest.TestCase):
-
- def setUp(self):
- self.dir = tempfile.mkdtemp()
- setup = os.path.join(self.dir, 'setup.py')
- f = open(setup, 'w')
- f.write(SETUP_PY)
- f.close()
- self.old_cwd = os.getcwd()
- os.chdir(self.dir)
- if sys.version >= "2.6":
- self.old_has_site = easy_install_pkg.HAS_USER_SITE
- self.old_file = easy_install_pkg.__file__
- self.old_base = site.USER_BASE
- site.USER_BASE = tempfile.mkdtemp()
- self.old_site = site.USER_SITE
- site.USER_SITE = tempfile.mkdtemp()
- easy_install_pkg.__file__ = site.USER_SITE
-
- def tearDown(self):
- os.chdir(self.old_cwd)
- shutil.rmtree(self.dir)
- if sys.version >= "2.6":
- shutil.rmtree(site.USER_BASE)
- shutil.rmtree(site.USER_SITE)
- site.USER_BASE = self.old_base
- site.USER_SITE = self.old_site
- easy_install_pkg.HAS_USER_SITE = self.old_has_site
- easy_install_pkg.__file__ = self.old_file
-
- def test_user_install_implied(self):
- easy_install_pkg.HAS_USER_SITE = True # disabled sometimes
- #XXX: replace with something meaningful
- if sys.version < "2.6":
- return #SKIP
- dist = Distribution()
- dist.script_name = 'setup.py'
- cmd = easy_install(dist)
- cmd.args = ['py']
- cmd.ensure_finalized()
- self.assertTrue(cmd.user, 'user should be implied')
-
- def test_multiproc_atexit(self):
- if not _MULTIPROC:
- return
- _LOG.info('this should not break')
-
- def test_user_install_not_implied_without_usersite_enabled(self):
- easy_install_pkg.HAS_USER_SITE = False # usually enabled
- #XXX: replace with something meaningful
- if sys.version < "2.6":
- return #SKIP
- dist = Distribution()
- dist.script_name = 'setup.py'
- cmd = easy_install(dist)
- cmd.args = ['py']
- cmd.initialize_options()
- self.assertFalse(cmd.user, 'user should not be implied')
-
- def test_local_index(self):
- # make sure the local index is used
- # when easy_install looks for installed
- # packages
- new_location = tempfile.mkdtemp()
- target = tempfile.mkdtemp()
- egg_file = os.path.join(new_location, 'foo-1.0.egg-info')
- f = open(egg_file, 'w')
- try:
- f.write('Name: foo\n')
- finally:
- f.close()
-
- sys.path.append(target)
- old_ppath = os.environ.get('PYTHONPATH')
- os.environ['PYTHONPATH'] = ':'.join(sys.path)
- try:
- dist = Distribution()
- dist.script_name = 'setup.py'
- cmd = easy_install(dist)
- cmd.install_dir = target
- cmd.args = ['foo']
- cmd.ensure_finalized()
- cmd.local_index.scan([new_location])
- res = cmd.easy_install('foo')
- self.assertEquals(res.location, new_location)
- finally:
- sys.path.remove(target)
- shutil.rmtree(new_location)
- shutil.rmtree(target)
- if old_ppath is not None:
- os.environ['PYTHONPATH'] = old_ppath
- else:
- del os.environ['PYTHONPATH']
-
diff --git a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/tests/test_packageindex.py b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/tests/test_packageindex.py
deleted file mode 100755
index 42cb8c1e..00000000
--- a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/tests/test_packageindex.py
+++ /dev/null
@@ -1,112 +0,0 @@
-"""Package Index Tests
-"""
-# More would be better!
-import sys
-import os, shutil, tempfile, unittest, urllib2
-import pkg_resources
-import setuptools.package_index
-from server import IndexServer
-
-class TestPackageIndex(unittest.TestCase):
-
- def test_bad_urls(self):
- index = setuptools.package_index.PackageIndex()
- url = 'http://127.0.0.1:0/nonesuch/test_package_index'
- try:
- v = index.open_url(url)
- except Exception, v:
- self.assert_(url in str(v))
- else:
- self.assert_(isinstance(v,urllib2.HTTPError))
-
- # issue 16
- # easy_install inquant.contentmirror.plone breaks because of a typo
- # in its home URL
- index = setuptools.package_index.PackageIndex(
- hosts=('www.example.com',)
- )
-
- url = 'url:%20https://svn.plone.org/svn/collective/inquant.contentmirror.plone/trunk'
- try:
- v = index.open_url(url)
- except Exception, v:
- self.assert_(url in str(v))
- else:
- self.assert_(isinstance(v, urllib2.HTTPError))
-
- def _urlopen(*args):
- import httplib
- raise httplib.BadStatusLine('line')
-
- old_urlopen = urllib2.urlopen
- urllib2.urlopen = _urlopen
- url = 'http://example.com'
- try:
- try:
- v = index.open_url(url)
- except Exception, v:
- self.assert_('line' in str(v))
- else:
- raise AssertionError('Should have raised here!')
- finally:
- urllib2.urlopen = old_urlopen
-
- # issue 20
- url = 'http://http://svn.pythonpaste.org/Paste/wphp/trunk'
- try:
- index.open_url(url)
- except Exception, v:
- self.assert_('nonnumeric port' in str(v))
-
-
- # issue #160
- if sys.version_info[0] == 2 and sys.version_info[1] == 7:
- # this should not fail
- url = 'http://example.com'
- page = ('<a href="http://www.famfamfam.com]('
- 'http://www.famfamfam.com/">')
- index.process_index(url, page)
-
-
- def test_url_ok(self):
- index = setuptools.package_index.PackageIndex(
- hosts=('www.example.com',)
- )
- url = 'file:///tmp/test_package_index'
- self.assert_(index.url_ok(url, True))
-
- def test_links_priority(self):
- """
- Download links from the pypi simple index should be used before
- external download links.
- http://bitbucket.org/tarek/distribute/issue/163/md5-validation-error
-
- Use case:
- - someone uploads a package to pypi, an md5 is generated
- - someone manually copies this link (with the md5 in the url) onto an
- external page accessible from the package page.
- - someone reuploads the package (with a different md5)
- - while easy_installing, an MD5 error occurs because the external link
- is used
- -> Distribute should use the link from pypi, not the external one.
- """
- # start an index server
- server = IndexServer()
- server.start()
- index_url = server.base_url() + 'test_links_priority/simple/'
-
- # scan a test index
- pi = setuptools.package_index.PackageIndex(index_url)
- requirement = pkg_resources.Requirement.parse('foobar')
- pi.find_packages(requirement)
- server.stop()
-
- # the distribution has been found
- self.assert_('foobar' in pi)
- # we have only one link, because links are compared without md5
- self.assert_(len(pi['foobar'])==1)
- # the link should be from the index
- self.assert_('correct_md5' in pi['foobar'][0].location)
-
-
-
diff --git a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/tests/test_resources.py b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/tests/test_resources.py
deleted file mode 100755
index 883cfad1..00000000
--- a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/tests/test_resources.py
+++ /dev/null
@@ -1,565 +0,0 @@
-#!/usr/bin/python
-# -*- coding: utf-8 -*-
-# NOTE: the shebang and encoding lines are for ScriptHeaderTests; do not remove
-from unittest import TestCase, makeSuite; from pkg_resources import *
-from setuptools.command.easy_install import get_script_header, is_sh
-import os, pkg_resources, sys, StringIO
-try: frozenset
-except NameError:
- from sets import ImmutableSet as frozenset
-
-class Metadata(EmptyProvider):
- """Mock object to return metadata as if from an on-disk distribution"""
-
- def __init__(self,*pairs):
- self.metadata = dict(pairs)
-
- def has_metadata(self,name):
- return name in self.metadata
-
- def get_metadata(self,name):
- return self.metadata[name]
-
- def get_metadata_lines(self,name):
- return yield_lines(self.get_metadata(name))
-
-class DistroTests(TestCase):
-
- def testCollection(self):
- # empty path should produce no distributions
- ad = Environment([], platform=None, python=None)
- self.assertEqual(list(ad), [])
- self.assertEqual(ad['FooPkg'],[])
- ad.add(Distribution.from_filename("FooPkg-1.3_1.egg"))
- ad.add(Distribution.from_filename("FooPkg-1.4-py2.4-win32.egg"))
- ad.add(Distribution.from_filename("FooPkg-1.2-py2.4.egg"))
-
- # Name is in there now
- self.assert_(ad['FooPkg'])
- # But only 1 package
- self.assertEqual(list(ad), ['foopkg'])
-
- # Distributions sort by version
- self.assertEqual(
- [dist.version for dist in ad['FooPkg']], ['1.4','1.3-1','1.2']
- )
- # Removing a distribution leaves sequence alone
- ad.remove(ad['FooPkg'][1])
- self.assertEqual(
- [dist.version for dist in ad['FooPkg']], ['1.4','1.2']
- )
- # And inserting adds them in order
- ad.add(Distribution.from_filename("FooPkg-1.9.egg"))
- self.assertEqual(
- [dist.version for dist in ad['FooPkg']], ['1.9','1.4','1.2']
- )
-
- ws = WorkingSet([])
- foo12 = Distribution.from_filename("FooPkg-1.2-py2.4.egg")
- foo14 = Distribution.from_filename("FooPkg-1.4-py2.4-win32.egg")
- req, = parse_requirements("FooPkg>=1.3")
-
- # Nominal case: no distros on path, should yield all applicable
- self.assertEqual(ad.best_match(req,ws).version, '1.9')
- # If a matching distro is already installed, should return only that
- ws.add(foo14); self.assertEqual(ad.best_match(req,ws).version, '1.4')
-
- # If the first matching distro is unsuitable, it's a version conflict
- ws = WorkingSet([]); ws.add(foo12); ws.add(foo14)
- self.assertRaises(VersionConflict, ad.best_match, req, ws)
-
- # If more than one match on the path, the first one takes precedence
- ws = WorkingSet([]); ws.add(foo14); ws.add(foo12); ws.add(foo14);
- self.assertEqual(ad.best_match(req,ws).version, '1.4')
-
- def checkFooPkg(self,d):
- self.assertEqual(d.project_name, "FooPkg")
- self.assertEqual(d.key, "foopkg")
- self.assertEqual(d.version, "1.3-1")
- self.assertEqual(d.py_version, "2.4")
- self.assertEqual(d.platform, "win32")
- self.assertEqual(d.parsed_version, parse_version("1.3-1"))
-
- def testDistroBasics(self):
- d = Distribution(
- "/some/path",
- project_name="FooPkg",version="1.3-1",py_version="2.4",platform="win32"
- )
- self.checkFooPkg(d)
-
- d = Distribution("/some/path")
- self.assertEqual(d.py_version, sys.version[:3])
- self.assertEqual(d.platform, None)
-
- def testDistroParse(self):
- d = Distribution.from_filename("FooPkg-1.3_1-py2.4-win32.egg")
- self.checkFooPkg(d)
- d = Distribution.from_filename("FooPkg-1.3_1-py2.4-win32.egg-info")
- self.checkFooPkg(d)
-
- def testDistroMetadata(self):
- d = Distribution(
- "/some/path", project_name="FooPkg", py_version="2.4", platform="win32",
- metadata = Metadata(
- ('PKG-INFO',"Metadata-Version: 1.0\nVersion: 1.3-1\n")
- )
- )
- self.checkFooPkg(d)
-
-
- def distRequires(self, txt):
- return Distribution("/foo", metadata=Metadata(('depends.txt', txt)))
-
- def checkRequires(self, dist, txt, extras=()):
- self.assertEqual(
- list(dist.requires(extras)),
- list(parse_requirements(txt))
- )
-
- def testDistroDependsSimple(self):
- for v in "Twisted>=1.5", "Twisted>=1.5\nZConfig>=2.0":
- self.checkRequires(self.distRequires(v), v)
-
-
- def testResolve(self):
- ad = Environment([]); ws = WorkingSet([])
- # Resolving no requirements -> nothing to install
- self.assertEqual( list(ws.resolve([],ad)), [] )
- # Request something not in the collection -> DistributionNotFound
- self.assertRaises(
- DistributionNotFound, ws.resolve, parse_requirements("Foo"), ad
- )
- Foo = Distribution.from_filename(
- "/foo_dir/Foo-1.2.egg",
- metadata=Metadata(('depends.txt', "[bar]\nBaz>=2.0"))
- )
- ad.add(Foo); ad.add(Distribution.from_filename("Foo-0.9.egg"))
-
- # Request thing(s) that are available -> list to activate
- for i in range(3):
- targets = list(ws.resolve(parse_requirements("Foo"), ad))
- self.assertEqual(targets, [Foo])
- map(ws.add,targets)
- self.assertRaises(VersionConflict, ws.resolve,
- parse_requirements("Foo==0.9"), ad)
- ws = WorkingSet([]) # reset
-
- # Request an extra that causes an unresolved dependency for "Baz"
- self.assertRaises(
- DistributionNotFound, ws.resolve,parse_requirements("Foo[bar]"), ad
- )
- Baz = Distribution.from_filename(
- "/foo_dir/Baz-2.1.egg", metadata=Metadata(('depends.txt', "Foo"))
- )
- ad.add(Baz)
-
- # Activation list now includes resolved dependency
- self.assertEqual(
- list(ws.resolve(parse_requirements("Foo[bar]"), ad)), [Foo,Baz]
- )
- # Requests for conflicting versions produce VersionConflict
- self.assertRaises( VersionConflict,
- ws.resolve, parse_requirements("Foo==1.2\nFoo!=1.2"), ad
- )
-
- def testDistroDependsOptions(self):
- d = self.distRequires("""
- Twisted>=1.5
- [docgen]
- ZConfig>=2.0
- docutils>=0.3
- [fastcgi]
- fcgiapp>=0.1""")
- self.checkRequires(d,"Twisted>=1.5")
- self.checkRequires(
- d,"Twisted>=1.5 ZConfig>=2.0 docutils>=0.3".split(), ["docgen"]
- )
- self.checkRequires(
- d,"Twisted>=1.5 fcgiapp>=0.1".split(), ["fastcgi"]
- )
- self.checkRequires(
- d,"Twisted>=1.5 ZConfig>=2.0 docutils>=0.3 fcgiapp>=0.1".split(),
- ["docgen","fastcgi"]
- )
- self.checkRequires(
- d,"Twisted>=1.5 fcgiapp>=0.1 ZConfig>=2.0 docutils>=0.3".split(),
- ["fastcgi", "docgen"]
- )
- self.assertRaises(UnknownExtra, d.requires, ["foo"])
-
- def testSetuptoolsDistributeCombination(self):
- # Ensure that installing a 0.7-series setuptools fails. PJE says that
- # it will not co-exist.
- ws = WorkingSet([])
- d = Distribution(
- "/some/path",
- project_name="setuptools",
- version="0.7a1")
- self.assertRaises(ValueError, ws.add, d)
- # A 0.6-series is no problem
- d2 = Distribution(
- "/some/path",
- project_name="setuptools",
- version="0.6c9")
- ws.add(d2)
-
- # a nonexistent version needs to work
- ws = WorkingSet([])
- d3 = Distribution(
- "/some/path",
- project_name="setuptools")
- ws.add(d3)
-
-
-class EntryPointTests(TestCase):
-
- def assertfields(self, ep):
- self.assertEqual(ep.name,"foo")
- self.assertEqual(ep.module_name,"setuptools.tests.test_resources")
- self.assertEqual(ep.attrs, ("EntryPointTests",))
- self.assertEqual(ep.extras, ("x",))
- self.assert_(ep.load() is EntryPointTests)
- self.assertEqual(
- str(ep),
- "foo = setuptools.tests.test_resources:EntryPointTests [x]"
- )
-
- def setUp(self):
- self.dist = Distribution.from_filename(
- "FooPkg-1.2-py2.4.egg", metadata=Metadata(('requires.txt','[x]')))
-
- def testBasics(self):
- ep = EntryPoint(
- "foo", "setuptools.tests.test_resources", ["EntryPointTests"],
- ["x"], self.dist
- )
- self.assertfields(ep)
-
- def testParse(self):
- s = "foo = setuptools.tests.test_resources:EntryPointTests [x]"
- ep = EntryPoint.parse(s, self.dist)
- self.assertfields(ep)
-
- ep = EntryPoint.parse("bar baz= spammity[PING]")
- self.assertEqual(ep.name,"bar baz")
- self.assertEqual(ep.module_name,"spammity")
- self.assertEqual(ep.attrs, ())
- self.assertEqual(ep.extras, ("ping",))
-
- ep = EntryPoint.parse(" fizzly = wocka:foo")
- self.assertEqual(ep.name,"fizzly")
- self.assertEqual(ep.module_name,"wocka")
- self.assertEqual(ep.attrs, ("foo",))
- self.assertEqual(ep.extras, ())
-
- def testRejects(self):
- for ep in [
- "foo", "x=1=2", "x=a:b:c", "q=x/na", "fez=pish:tush-z", "x=f[a]>2",
- ]:
- try: EntryPoint.parse(ep)
- except ValueError: pass
- else: raise AssertionError("Should've been bad", ep)
-
- def checkSubMap(self, m):
- self.assertEqual(len(m), len(self.submap_expect))
- for key, ep in self.submap_expect.iteritems():
- self.assertEqual(repr(m.get(key)), repr(ep))
-
- submap_expect = dict(
- feature1=EntryPoint('feature1', 'somemodule', ['somefunction']),
- feature2=EntryPoint('feature2', 'another.module', ['SomeClass'], ['extra1','extra2']),
- feature3=EntryPoint('feature3', 'this.module', extras=['something'])
- )
- submap_str = """
- # define features for blah blah
- feature1 = somemodule:somefunction
- feature2 = another.module:SomeClass [extra1,extra2]
- feature3 = this.module [something]
- """
-
- def testParseList(self):
- self.checkSubMap(EntryPoint.parse_group("xyz", self.submap_str))
- self.assertRaises(ValueError, EntryPoint.parse_group, "x a", "foo=bar")
- self.assertRaises(ValueError, EntryPoint.parse_group, "x",
- ["foo=baz", "foo=bar"])
-
- def testParseMap(self):
- m = EntryPoint.parse_map({'xyz':self.submap_str})
- self.checkSubMap(m['xyz'])
- self.assertEqual(m.keys(),['xyz'])
- m = EntryPoint.parse_map("[xyz]\n"+self.submap_str)
- self.checkSubMap(m['xyz'])
- self.assertEqual(m.keys(),['xyz'])
- self.assertRaises(ValueError, EntryPoint.parse_map, ["[xyz]", "[xyz]"])
- self.assertRaises(ValueError, EntryPoint.parse_map, self.submap_str)
-
-class RequirementsTests(TestCase):
-
- def testBasics(self):
- r = Requirement.parse("Twisted>=1.2")
- self.assertEqual(str(r),"Twisted>=1.2")
- self.assertEqual(repr(r),"Requirement.parse('Twisted>=1.2')")
- self.assertEqual(r, Requirement("Twisted", [('>=','1.2')], ()))
- self.assertEqual(r, Requirement("twisTed", [('>=','1.2')], ()))
- self.assertNotEqual(r, Requirement("Twisted", [('>=','2.0')], ()))
- self.assertNotEqual(r, Requirement("Zope", [('>=','1.2')], ()))
- self.assertNotEqual(r, Requirement("Zope", [('>=','3.0')], ()))
- self.assertNotEqual(r, Requirement.parse("Twisted[extras]>=1.2"))
-
- def testOrdering(self):
- r1 = Requirement("Twisted", [('==','1.2c1'),('>=','1.2')], ())
- r2 = Requirement("Twisted", [('>=','1.2'),('==','1.2c1')], ())
- self.assertEqual(r1,r2)
- self.assertEqual(str(r1),str(r2))
- self.assertEqual(str(r2),"Twisted==1.2c1,>=1.2")
-
- def testBasicContains(self):
- r = Requirement("Twisted", [('>=','1.2')], ())
- foo_dist = Distribution.from_filename("FooPkg-1.3_1.egg")
- twist11 = Distribution.from_filename("Twisted-1.1.egg")
- twist12 = Distribution.from_filename("Twisted-1.2.egg")
- self.assert_(parse_version('1.2') in r)
- self.assert_(parse_version('1.1') not in r)
- self.assert_('1.2' in r)
- self.assert_('1.1' not in r)
- self.assert_(foo_dist not in r)
- self.assert_(twist11 not in r)
- self.assert_(twist12 in r)
-
- def testAdvancedContains(self):
- r, = parse_requirements("Foo>=1.2,<=1.3,==1.9,>2.0,!=2.5,<3.0,==4.5")
- for v in ('1.2','1.2.2','1.3','1.9','2.0.1','2.3','2.6','3.0c1','4.5'):
- self.assert_(v in r, (v,r))
- for v in ('1.2c1','1.3.1','1.5','1.9.1','2.0','2.5','3.0','4.0'):
- self.assert_(v not in r, (v,r))
-
-
- def testOptionsAndHashing(self):
- r1 = Requirement.parse("Twisted[foo,bar]>=1.2")
- r2 = Requirement.parse("Twisted[bar,FOO]>=1.2")
- r3 = Requirement.parse("Twisted[BAR,FOO]>=1.2.0")
- self.assertEqual(r1,r2)
- self.assertEqual(r1,r3)
- self.assertEqual(r1.extras, ("foo","bar"))
- self.assertEqual(r2.extras, ("bar","foo")) # extras are normalized
- self.assertEqual(hash(r1), hash(r2))
- self.assertEqual(
- hash(r1), hash(("twisted", ((">=",parse_version("1.2")),),
- frozenset(["foo","bar"])))
- )
-
- def testVersionEquality(self):
- r1 = Requirement.parse("foo==0.3a2")
- r2 = Requirement.parse("foo!=0.3a4")
- d = Distribution.from_filename
-
- self.assert_(d("foo-0.3a4.egg") not in r1)
- self.assert_(d("foo-0.3a1.egg") not in r1)
- self.assert_(d("foo-0.3a4.egg") not in r2)
-
- self.assert_(d("foo-0.3a2.egg") in r1)
- self.assert_(d("foo-0.3a2.egg") in r2)
- self.assert_(d("foo-0.3a3.egg") in r2)
- self.assert_(d("foo-0.3a5.egg") in r2)
-
- def testDistributeSetuptoolsOverride(self):
- # Plain setuptools or distribute mean we return distribute.
- self.assertEqual(
- Requirement.parse('setuptools').project_name, 'distribute')
- self.assertEqual(
- Requirement.parse('distribute').project_name, 'distribute')
- # setuptools lower than 0.7 means distribute
- self.assertEqual(
- Requirement.parse('setuptools==0.6c9').project_name, 'distribute')
- self.assertEqual(
- Requirement.parse('setuptools==0.6c10').project_name, 'distribute')
- self.assertEqual(
- Requirement.parse('setuptools>=0.6').project_name, 'distribute')
- self.assertEqual(
- Requirement.parse('setuptools < 0.7').project_name, 'distribute')
- # setuptools 0.7 and higher means setuptools.
- self.assertEqual(
- Requirement.parse('setuptools == 0.7').project_name, 'setuptools')
- self.assertEqual(
- Requirement.parse('setuptools == 0.7a1').project_name, 'setuptools')
- self.assertEqual(
- Requirement.parse('setuptools >= 0.7').project_name, 'setuptools')
-
-
-
-
-
-
-
-
-
-
-
-class ParseTests(TestCase):
-
- def testEmptyParse(self):
- self.assertEqual(list(parse_requirements('')), [])
-
- def testYielding(self):
- for inp,out in [
- ([], []), ('x',['x']), ([[]],[]), (' x\n y', ['x','y']),
- (['x\n\n','y'], ['x','y']),
- ]:
- self.assertEqual(list(pkg_resources.yield_lines(inp)),out)
-
- def testSplitting(self):
- self.assertEqual(
- list(
- pkg_resources.split_sections("""
- x
- [Y]
- z
-
- a
- [b ]
- # foo
- c
- [ d]
- [q]
- v
- """
- )
- ),
- [(None,["x"]), ("Y",["z","a"]), ("b",["c"]), ("d",[]), ("q",["v"])]
- )
- self.assertRaises(ValueError,list,pkg_resources.split_sections("[foo"))
-
- def testSafeName(self):
- self.assertEqual(safe_name("adns-python"), "adns-python")
- self.assertEqual(safe_name("WSGI Utils"), "WSGI-Utils")
- self.assertEqual(safe_name("WSGI Utils"), "WSGI-Utils")
- self.assertEqual(safe_name("Money$$$Maker"), "Money-Maker")
- self.assertNotEqual(safe_name("peak.web"), "peak-web")
-
- def testSafeVersion(self):
- self.assertEqual(safe_version("1.2-1"), "1.2-1")
- self.assertEqual(safe_version("1.2 alpha"), "1.2.alpha")
- self.assertEqual(safe_version("2.3.4 20050521"), "2.3.4.20050521")
- self.assertEqual(safe_version("Money$$$Maker"), "Money-Maker")
- self.assertEqual(safe_version("peak.web"), "peak.web")
-
- def testSimpleRequirements(self):
- self.assertEqual(
- list(parse_requirements('Twis-Ted>=1.2-1')),
- [Requirement('Twis-Ted',[('>=','1.2-1')], ())]
- )
- self.assertEqual(
- list(parse_requirements('Twisted >=1.2, \ # more\n<2.0')),
- [Requirement('Twisted',[('>=','1.2'),('<','2.0')], ())]
- )
- self.assertEqual(
- Requirement.parse("FooBar==1.99a3"),
- Requirement("FooBar", [('==','1.99a3')], ())
- )
- self.assertRaises(ValueError,Requirement.parse,">=2.3")
- self.assertRaises(ValueError,Requirement.parse,"x\\")
- self.assertRaises(ValueError,Requirement.parse,"x==2 q")
- self.assertRaises(ValueError,Requirement.parse,"X==1\nY==2")
- self.assertRaises(ValueError,Requirement.parse,"#")
-
- def testVersionEquality(self):
- def c(s1,s2):
- p1, p2 = parse_version(s1),parse_version(s2)
- self.assertEqual(p1,p2, (s1,s2,p1,p2))
-
- c('1.2-rc1', '1.2rc1')
- c('0.4', '0.4.0')
- c('0.4.0.0', '0.4.0')
- c('0.4.0-0', '0.4-0')
- c('0pl1', '0.0pl1')
- c('0pre1', '0.0c1')
- c('0.0.0preview1', '0c1')
- c('0.0c1', '0-rc1')
- c('1.2a1', '1.2.a.1'); c('1.2...a', '1.2a')
-
- def testVersionOrdering(self):
- def c(s1,s2):
- p1, p2 = parse_version(s1),parse_version(s2)
- self.assert_(p1<p2, (s1,s2,p1,p2))
-
- c('2.1','2.1.1')
- c('2a1','2b0')
- c('2a1','2.1')
- c('2.3a1', '2.3')
- c('2.1-1', '2.1-2')
- c('2.1-1', '2.1.1')
- c('2.1', '2.1pl4')
- c('2.1a0-20040501', '2.1')
- c('1.1', '02.1')
- c('A56','B27')
- c('3.2', '3.2.pl0')
- c('3.2-1', '3.2pl1')
- c('3.2pl1', '3.2pl1-1')
- c('0.4', '4.0')
- c('0.0.4', '0.4.0')
- c('0pl1', '0.4pl1')
- c('2.1.0-rc1','2.1.0')
- c('2.1dev','2.1a0')
-
- torture ="""
- 0.80.1-3 0.80.1-2 0.80.1-1 0.79.9999+0.80.0pre4-1
- 0.79.9999+0.80.0pre2-3 0.79.9999+0.80.0pre2-2
- 0.77.2-1 0.77.1-1 0.77.0-1
- """.split()
-
- for p,v1 in enumerate(torture):
- for v2 in torture[p+1:]:
- c(v2,v1)
-
-
-
-
-
-
-
-
-class ScriptHeaderTests(TestCase):
- non_ascii_exe = '/Users/José/bin/python'
-
- def test_get_script_header(self):
- if not sys.platform.startswith('java') or not is_sh(sys.executable):
- # This test is for non-Jython platforms
- self.assertEqual(get_script_header('#!/usr/local/bin/python'),
- '#!%s\n' % os.path.normpath(sys.executable))
- self.assertEqual(get_script_header('#!/usr/bin/python -x'),
- '#!%s -x\n' % os.path.normpath(sys.executable))
- self.assertEqual(get_script_header('#!/usr/bin/python',
- executable=self.non_ascii_exe),
- '#!%s -x\n' % self.non_ascii_exe)
-
- def test_get_script_header_jython_workaround(self):
- # This test doesn't work with Python 3 in some locales
- if (sys.version_info >= (3,) and os.environ.get("LC_CTYPE")
- in (None, "C", "POSIX")):
- return
- platform = sys.platform
- sys.platform = 'java1.5.0_13'
- stdout = sys.stdout
- try:
- # A mock sys.executable that uses a shebang line (this file)
- exe = os.path.normpath(os.path.splitext(__file__)[0] + '.py')
- self.assertEqual(
- get_script_header('#!/usr/local/bin/python', executable=exe),
- '#!/usr/bin/env %s\n' % exe)
-
- # Ensure we generate what is basically a broken shebang line
- # when there are options, with a warning emitted
- sys.stdout = sys.stderr = StringIO.StringIO()
- self.assertEqual(get_script_header('#!/usr/bin/python -x',
- executable=exe),
- '#!%s -x\n' % exe)
- self.assert_('Unable to adapt shebang line' in sys.stdout.getvalue())
- sys.stdout = sys.stderr = StringIO.StringIO()
- self.assertEqual(get_script_header('#!/usr/bin/python',
- executable=self.non_ascii_exe),
- '#!%s -x\n' % self.non_ascii_exe)
- self.assert_('Unable to adapt shebang line' in sys.stdout.getvalue())
- finally:
- sys.platform = platform
- sys.stdout = stdout
-
diff --git a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/tests/test_sandbox.py b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/tests/test_sandbox.py
deleted file mode 100755
index 1609ee86..00000000
--- a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/tests/test_sandbox.py
+++ /dev/null
@@ -1,66 +0,0 @@
-"""develop tests
-"""
-import sys
-import os
-import shutil
-import unittest
-import tempfile
-
-from setuptools.sandbox import DirectorySandbox, SandboxViolation
-
-def has_win32com():
- """
- Run this to determine if the local machine has win32com, and if it
- does, include additional tests.
- """
- if not sys.platform.startswith('win32'):
- return False
- try:
- mod = __import__('win32com')
- except ImportError:
- return False
- return True
-
-class TestSandbox(unittest.TestCase):
-
- def setUp(self):
- self.dir = tempfile.mkdtemp()
-
- def tearDown(self):
- shutil.rmtree(self.dir)
-
- def test_devnull(self):
- if sys.version < '2.4':
- return
- sandbox = DirectorySandbox(self.dir)
- sandbox.run(self._file_writer(os.devnull))
-
- def _file_writer(path):
- def do_write():
- f = open(path, 'w')
- f.write('xxx')
- f.close()
- return do_write
-
- _file_writer = staticmethod(_file_writer)
-
- if has_win32com():
- def test_win32com(self):
- """
- win32com should not be prevented from caching COM interfaces
- in gen_py.
- """
- import win32com
- gen_py = win32com.__gen_path__
- target = os.path.join(gen_py, 'test_write')
- sandbox = DirectorySandbox(self.dir)
- try:
- try:
- sandbox.run(self._file_writer(target))
- except SandboxViolation:
- self.fail("Could not create gen_py file due to SandboxViolation")
- finally:
- if os.path.exists(target): os.remove(target)
-
-if __name__ == '__main__':
- unittest.main()
diff --git a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/tests/test_upload_docs.py b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/tests/test_upload_docs.py
deleted file mode 100755
index 8b2dc892..00000000
--- a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/tests/test_upload_docs.py
+++ /dev/null
@@ -1,65 +0,0 @@
-"""build_ext tests
-"""
-import sys, os, shutil, tempfile, unittest, site, zipfile
-from setuptools.command.upload_docs import upload_docs
-from setuptools.dist import Distribution
-
-SETUP_PY = """\
-from setuptools import setup
-
-setup(name='foo')
-"""
-
-class TestUploadDocsTest(unittest.TestCase):
- def setUp(self):
- self.dir = tempfile.mkdtemp()
- setup = os.path.join(self.dir, 'setup.py')
- f = open(setup, 'w')
- f.write(SETUP_PY)
- f.close()
- self.old_cwd = os.getcwd()
- os.chdir(self.dir)
-
- self.upload_dir = os.path.join(self.dir, 'build')
- os.mkdir(self.upload_dir)
-
- # A test document.
- f = open(os.path.join(self.upload_dir, 'index.html'), 'w')
- f.write("Hello world.")
- f.close()
-
- # An empty folder.
- os.mkdir(os.path.join(self.upload_dir, 'empty'))
-
- if sys.version >= "2.6":
- self.old_base = site.USER_BASE
- site.USER_BASE = upload_docs.USER_BASE = tempfile.mkdtemp()
- self.old_site = site.USER_SITE
- site.USER_SITE = upload_docs.USER_SITE = tempfile.mkdtemp()
-
- def tearDown(self):
- os.chdir(self.old_cwd)
- shutil.rmtree(self.dir)
- if sys.version >= "2.6":
- shutil.rmtree(site.USER_BASE)
- shutil.rmtree(site.USER_SITE)
- site.USER_BASE = self.old_base
- site.USER_SITE = self.old_site
-
- def test_create_zipfile(self):
- # Test to make sure zipfile creation handles common cases.
- # This explicitly includes a folder containing an empty folder.
-
- dist = Distribution()
-
- cmd = upload_docs(dist)
- cmd.upload_dir = self.upload_dir
- zip_file = cmd.create_zipfile()
-
- assert zipfile.is_zipfile(zip_file)
-
- zip_f = zipfile.ZipFile(zip_file) # reopen to inspect the archive contents
-
- assert zip_f.namelist() == ['index.html']
-
-
diff --git a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/site.py b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/site.py
deleted file mode 100755
index 80e084b2..00000000
--- a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/site.py
+++ /dev/null
@@ -1,82 +0,0 @@
-def __boot():
- import sys, imp, os, os.path
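- # Find and load the real 'site' module from the standard library path
- # (skipping this file's own directory), then re-insert PYTHONPATH and
- # egg entries ahead of the system entries in a controlled order.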
- PYTHONPATH = os.environ.get('PYTHONPATH')
- if PYTHONPATH is None or (sys.platform=='win32' and not PYTHONPATH):
- PYTHONPATH = []
- else:
- PYTHONPATH = PYTHONPATH.split(os.pathsep)
-
- pic = getattr(sys,'path_importer_cache',{})
- stdpath = sys.path[len(PYTHONPATH):]
- mydir = os.path.dirname(__file__)
- #print "searching",stdpath,sys.path
-
- for item in stdpath:
- if item==mydir or not item:
- continue # skip empty entries (the cwd on Windows) and this file's own directory
- importer = pic.get(item)
- if importer is not None:
- loader = importer.find_module('site')
- if loader is not None:
- # This should actually reload the current module
- loader.load_module('site')
- break
- else:
- try:
- stream, path, descr = imp.find_module('site',[item])
- except ImportError:
- continue
- if stream is None:
- continue
- try:
- # This should actually reload the current module
- imp.load_module('site',stream,path,descr)
- finally:
- stream.close()
- break
- else:
- raise ImportError("Couldn't find the real 'site' module")
-
- #print "loaded", __file__
-
- known_paths = dict([(makepath(item)[1],1) for item in sys.path]) # dict, not set, for Python 2.2 compat
-
- oldpos = getattr(sys,'__egginsert',0) # save old insertion position
- sys.__egginsert = 0 # and reset the current one
-
- for item in PYTHONPATH:
- addsitedir(item)
-
- sys.__egginsert += oldpos # restore effective old position
-
- d,nd = makepath(stdpath[0])
- insert_at = None
- new_path = []
-
- for item in sys.path:
- p,np = makepath(item)
-
- if np==nd and insert_at is None:
- # We've hit the first 'system' path entry, so added entries go here
- insert_at = len(new_path)
-
- if np in known_paths or insert_at is None:
- new_path.append(item)
- else:
- # entry added after the insert point; splice it back in at insert_at
- new_path.insert(insert_at, item)
- insert_at += 1
-
- sys.path[:] = new_path
-
-if __name__=='site':
- __boot()
- del __boot
-
-
-
-
-
-
-
-
diff --git a/lib/python2.7/site-packages/easy-install.pth b/lib/python2.7/site-packages/easy-install.pth
deleted file mode 100644
index 492c156f..00000000
--- a/lib/python2.7/site-packages/easy-install.pth
+++ /dev/null
@@ -1,4 +0,0 @@
-import sys; sys.__plen = len(sys.path)
-./distribute-0.6.14-py2.7.egg
-./pip-0.8.1-py2.7.egg
-import sys; new=sys.path[sys.__plen:]; del sys.path[sys.__plen:]; p=getattr(sys,'__egginsert',0); sys.path[p:p]=new; sys.__egginsert = p+len(new)
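The two executable lines in this .pth file are what give the eggs priority: site.py runs them when it processes the file. Unrolled as a minimal Python sketch (same logic; the relative egg paths are the two listed above):

    import sys

    sys.__plen = len(sys.path)        # first line: remember where sys.path ends
    # site.py appends each bare path line, resolved against site-packages:
    sys.path.append('./distribute-0.6.14-py2.7.egg')
    sys.path.append('./pip-0.8.1-py2.7.egg')

    # last line: move the entries just added up to the egg insertion point
    new = sys.path[sys.__plen:]
    del sys.path[sys.__plen:]
    p = getattr(sys, '__egginsert', 0)
    sys.path[p:p] = new               # eggs now shadow later sys.path entries
    sys.__egginsert = p + len(new)    # the next .pth file inserts after these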
diff --git a/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/EGG-INFO/PKG-INFO b/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/EGG-INFO/PKG-INFO
deleted file mode 100644
index fb0a3680..00000000
--- a/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/EGG-INFO/PKG-INFO
+++ /dev/null
@@ -1,348 +0,0 @@
-Metadata-Version: 1.0
-Name: pip
-Version: 0.8.1
-Summary: pip installs packages. Python packages. An easy_install replacement
-Home-page: http://pip.openplans.org
-Author: Ian Bicking
-Author-email: python-virtualenv@groups.google.com
-License: MIT
-Description: The main website for pip is `pip.openplans.org
- <http://pip.openplans.org>`_. You can also install
- the `in-development version <http://bitbucket.org/ianb/pip/get/tip.gz#egg=pip-dev>`_
- of pip with ``easy_install pip==dev``.
-
-
- Introduction
- ------------
-
- pip installs packages. Python packages.
-
- If you use `virtualenv <http://virtualenv.openplans.org>`__ -- a tool
- for installing libraries in a local and isolated manner -- you'll
- automatically get a copy of pip. Free bonus!
-
- Once you have pip, you can use it like this::
-
- $ pip install SomePackage
-
- SomePackage is some package you'll find on `PyPI
- <http://pypi.python.org/pypi/>`_. This installs the package and all
- its dependencies.
-
- pip does other stuff too, with packages, but install is the biggest
- one. You can ``pip uninstall`` too.
-
- You can also install from a URL (that points to a tar or zip file),
- install from some version control system (use URLs like
- ``hg+http://domain/repo`` -- or prefix ``git+``, ``svn+`` etc). pip
- knows a bunch of stuff about revisions and stuff, so if you need to do
- things like install a very specific revision from a repository pip can
- do that too.
-
- If you've ever used ``python setup.py develop``, you can do something
- like that with ``pip install -e ./`` -- this works with packages that
- use ``distutils`` too (usually this only works with Setuptools
- projects).
-
- You can use ``pip install --upgrade SomePackage`` to upgrade to a
- newer version, or ``pip install SomePackage==1.0.4`` to install a very
- specific version.
-
- Pip Compared To easy_install
- ----------------------------
-
- pip is a replacement for `easy_install
- <http://peak.telecommunity.com/DevCenter/EasyInstall>`_. It uses mostly the
- same techniques for finding packages, so packages that were made
- easy_installable should be pip-installable as well.
-
- pip is meant to improve on easy_install. Some of the improvements:
-
- * All packages are downloaded before installation. Partially-completed
- installation doesn't occur as a result.
-
- * Care is taken to present useful output on the console.
-
- * The reasons for actions are kept track of. For instance, if a package is
- being installed, pip keeps track of why that package was required.
-
- * Error messages should be useful.
-
- * The code is relatively concise and cohesive, making it easier to use
- programmatically.
-
- * Packages don't have to be installed as egg archives, they can be installed
- flat (while keeping the egg metadata).
-
- * Native support for other version control systems (Git, Mercurial and Bazaar)
-
- * Uninstallation of packages.
-
- * Simple to define fixed sets of requirements and reliably reproduce a
- set of packages.
-
- pip doesn't do everything that easy_install does. Specifically:
-
- * It cannot install from eggs. It only installs from source. (In the
- future it would be good if it could install binaries from Windows ``.exe``
- or ``.msi`` -- binary install on other platforms is not a priority.)
-
- * It doesn't understand Setuptools extras (like ``package[test]``). This should
- be added eventually.
-
- * It is incompatible with some packages that extensively customize distutils
- or setuptools in their ``setup.py`` files.
-
- pip is complementary with `virtualenv
- <http://pypi.python.org/pypi/virtualenv>`__, and it is encouraged that you use
- virtualenv to isolate your installation.
-
- Community
- ---------
-
- The homepage for pip is temporarily located `on PyPI
- <http://pypi.python.org/pypi/pip>`_ -- a more proper homepage will
- follow. Bugs can go on the `pip issue tracker
- <http://bitbucket.org/ianb/pip/issues/>`_. Discussion should happen on the
- `virtualenv email group
- <http://groups.google.com/group/python-virtualenv?hl=en>`_.
-
- Uninstall
- ---------
-
- pip is able to uninstall most installed packages with ``pip uninstall
- package-name``.
-
- Known exceptions include pure-distutils packages installed with
- ``python setup.py install`` (such packages leave behind no metadata allowing
- determination of what files were installed), and script wrappers installed
- by develop-installs (``python setup.py develop``).
-
- pip also performs an automatic uninstall of an old version of a package
- before upgrading to a newer version, so outdated files (and egg-info data)
- from conflicting versions aren't left hanging around to cause trouble. The
- old version of the package is automatically restored if the new version
- fails to download or install.
-
- .. _`requirements file`:
-
- Requirements Files
- ------------------
-
- When installing software, and Python packages in particular, it's common that
- you get a lot of libraries installed. You just did ``easy_install MyPackage``
- and you get a dozen packages. Each of these packages has its own version.
-
- Maybe you ran that installation and it works. Great! Will it keep working?
- Did you have to provide special options to get it to find everything? Did you
- have to install a bunch of other optional pieces? Most of all, will you be able
- to do it again? Requirements files give you a way to create an *environment*:
- a *set* of packages that work together.
-
- If you've ever tried to set up an application on a new system, or with slightly
- updated pieces, and had it fail, pip requirements are for you. If you
- haven't had this problem then you will eventually, so pip requirements are
- for you too -- requirements make package installation explicit and repeatable.
-
- So what are requirements files? They are very simple: lists of packages to
- install. Instead of running something like ``pip MyApp`` and getting
- whatever libraries come along, you can create a requirements file like this::
-
- MyApp
- Framework==0.9.4
- Library>=0.2
-
- Then, regardless of what MyApp lists in ``setup.py``, you'll get a
- specific version of Framework (0.9.4) and at least the 0.2 version of
- Library. (You might think you could list these specific versions in
- MyApp's ``setup.py`` -- but if you do that you'll have to edit MyApp
- if you want to try a new version of Framework, or release a new
- version of MyApp if you determine that Library 0.3 doesn't work with
- your application.) You can also add optional libraries and support
- tools that MyApp doesn't strictly require, giving people a set of
- recommended libraries.
-
- You can also include "editable" packages -- packages that are checked out from
- Subversion, Git, Mercurial and Bazaar. These are just like using the ``-e``
- option to pip. They look like::
-
- -e svn+http://myrepo/svn/MyApp#egg=MyApp
-
- You have to start the URL with ``svn+`` (``git+``, ``hg+`` or ``bzr+``), and
- you have to include ``#egg=Package`` so pip knows what to expect at that URL.
- You can also include ``@rev`` in the URL, e.g., ``@275`` to check out
- revision 275.
-
- Requirement files are mostly *flat*. Maybe ``MyApp`` requires
- ``Framework``, and ``Framework`` requires ``Library``. I encourage
- you to still list all these in a single requirement file; it is the
- nature of Python programs that there are implicit bindings *directly*
- between MyApp and Library. For instance, Framework might expose one
- of Library's objects, and so if Library is updated it might directly
- break MyApp. If that happens you can update the requirements file to
- force an earlier version of Library, and you can do that without
- having to re-release MyApp at all.
-
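- For instance, the same flat file could pin Library to an earlier
- series directly (the versions here are hypothetical)::
-
-     MyApp
-     Framework==0.9.4
-     Library>=0.2,<0.3
-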
- Read the `requirements file format <http://pip.openplans.org/requirement-format.html>`_ to
- learn about other features.
-
- Freezing Requirements
- ---------------------
-
- So you have a working set of packages, and you want to be able to install them
- elsewhere. `Requirements files`_ let you install exact versions, but they won't
- tell you what all the exact versions are.
-
- To create a new requirements file from a known working environment, use::
-
- $ pip freeze > stable-req.txt
-
- This will write a listing of *all* installed libraries to ``stable-req.txt``
- with exact versions for every library. You may want to edit the file down after
- generating (e.g., to eliminate unnecessary libraries), but it'll give you a
- stable starting point for constructing your requirements file.
-
- You can also give it an existing requirements file, and it will use that as a
- sort of template for the new file. So if you do::
-
- $ pip freeze -r devel-req.txt > stable-req.txt
-
- it will keep the packages listed in ``devel-req.txt`` in order and preserve
- comments.
-
- Bundles
- -------
-
- Another way to distribute a set of libraries is a bundle format (specific to
- pip). This format is not stable at this time (there simply hasn't been
- any feedback, nor a great deal of thought). A bundle file contains all the
- source for your package, and you can have pip install them all together.
- Once you have the bundle file, further network access won't be necessary. To
- build a bundle file, do::
-
- $ pip bundle MyApp.pybundle MyApp
-
- (Using a `requirements file`_ would be wise.) Then someone else can get the
- file ``MyApp.pybundle`` and run::
-
- $ pip install MyApp.pybundle
-
- This is *not* a binary format. This only packages source. If you have binary
- packages, then the person who installs the files will have to have a compiler,
- any necessary headers installed, etc. Binary packages are hard, this is
- relatively easy.
-
- Using pip with virtualenv
- -------------------------
-
- pip is most nutritious when used with `virtualenv
- <http://pypi.python.org/pypi/virtualenv>`__. One of the reasons pip
- doesn't install "multi-version" eggs is that virtualenv removes much of the need
- for it. Because pip is installed by virtualenv, just use
- ``path/to/my/environment/bin/pip`` to install things into that
- specific environment.
-
- To tell pip to only run if there is a virtualenv currently activated,
- and to bail if not, use::
-
- export PIP_REQUIRE_VIRTUALENV=true
-
- To tell pip to automatically use the currently active virtualenv::
-
- export PIP_RESPECT_VIRTUALENV=true
-
- Providing an environment with ``-E`` will be ignored.
-
- Using pip with virtualenvwrapper
- ---------------------------------
-
- If you are using `virtualenvwrapper
- <http://www.doughellmann.com/projects/virtualenvwrapper/>`_, you might
- want pip to automatically create its virtualenvs in your
- ``$WORKON_HOME``.
-
- You can tell pip to do so by defining ``PIP_VIRTUALENV_BASE`` in your
- environment and setting it to the same value as that of
- ``$WORKON_HOME``.
-
- Do so by adding the line::
-
- export PIP_VIRTUALENV_BASE=$WORKON_HOME
-
- in your .bashrc under the line starting with ``export WORKON_HOME``.
-
- Using pip with buildout
- -----------------------
-
- If you are using `zc.buildout
- <http://pypi.python.org/pypi/zc.buildout>`_ you should look at
- `gp.recipe.pip <http://pypi.python.org/pypi/gp.recipe.pip>`_ as an
- option to use pip and virtualenv in your buildouts.
-
- Command line completion
- -----------------------
-
- pip comes with support for command line completion in bash and zsh and
- allows you to tab-complete commands and options. To enable it you simply
- need to copy the required shell script to your shell startup file
- (e.g. ``.profile`` or ``.zprofile``) by running the special ``completion``
- command, e.g. for bash::
-
- $ pip completion --bash >> ~/.profile
-
- And for zsh::
-
- $ pip completion --zsh >> ~/.zprofile
-
- Alternatively, you can use the result of the ``completion`` command
- directly with the eval function of your shell, e.g. by adding::
-
- eval "`pip completion --bash`"
-
- to your startup file.
-
- Searching for packages
- ----------------------
-
- pip can search the `Python Package Index <http://pypi.python.org/pypi>`_ (PyPI)
- for packages using the ``pip search`` command. To search, run::
-
- $ pip search "query"
-
- The query will be used to search the names and summaries of all packages
- indexed.
-
- pip searches http://pypi.python.org/pypi by default but alternative indexes
- can be searched by using the ``--index`` flag.
-
- Mirror support
- --------------
-
- The `PyPI mirroring infrastructure <http://pypi.python.org/mirrors>`_ as
- described in `PEP 381 <http://www.python.org/dev/peps/pep-0381/>`_ can be
- used by passing the ``--use-mirrors`` option to the install command.
- Alternatively, you can use the other ways to configure pip, e.g.::
-
- $ export PIP_USE_MIRRORS=true
-
- If enabled, pip will automatically query the DNS entry of the mirror index URL
- to find the list of mirrors to use. In case you want to override this list,
- please use the ``--mirrors`` option of the install command, or add to your pip
- configuration file::
-
- [install]
- use-mirrors = true
- mirrors =
- http://d.pypi.python.org
- http://b.pypi.python.org
-
-Keywords: easy_install distutils setuptools egg virtualenv
-Platform: UNKNOWN
-Classifier: Development Status :: 4 - Beta
-Classifier: Intended Audience :: Developers
-Classifier: License :: OSI Approved :: MIT License
-Classifier: Topic :: Software Development :: Build Tools
-Classifier: Programming Language :: Python :: 2.4
-Classifier: Programming Language :: Python :: 2.5
-Classifier: Programming Language :: Python :: 2.6
-Classifier: Programming Language :: Python :: 2.7
diff --git a/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/EGG-INFO/SOURCES.txt b/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/EGG-INFO/SOURCES.txt
deleted file mode 100644
index 3a068547..00000000
--- a/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/EGG-INFO/SOURCES.txt
+++ /dev/null
@@ -1,57 +0,0 @@
-MANIFEST.in
-setup.cfg
-setup.py
-docs/branches.txt
-docs/ci-server-step-by-step.txt
-docs/configuration.txt
-docs/how-to-contribute.txt
-docs/index.txt
-docs/license.txt
-docs/news.txt
-docs/requirement-format.txt
-docs/running-tests.txt
-docs/_build/branches.html
-docs/_build/ci-server-step-by-step.html
-docs/_build/configuration.html
-docs/_build/how-to-contribute.html
-docs/_build/index.html
-docs/_build/license.html
-docs/_build/news.html
-docs/_build/requirement-format.html
-docs/_build/running-tests.html
-docs/_build/search.html
-pip/__init__.py
-pip/_pkgutil.py
-pip/backwardcompat.py
-pip/basecommand.py
-pip/baseparser.py
-pip/download.py
-pip/exceptions.py
-pip/index.py
-pip/locations.py
-pip/log.py
-pip/req.py
-pip/runner.py
-pip/util.py
-pip/venv.py
-pip.egg-info/PKG-INFO
-pip.egg-info/SOURCES.txt
-pip.egg-info/dependency_links.txt
-pip.egg-info/entry_points.txt
-pip.egg-info/not-zip-safe
-pip.egg-info/top_level.txt
-pip/commands/__init__.py
-pip/commands/bundle.py
-pip/commands/completion.py
-pip/commands/freeze.py
-pip/commands/help.py
-pip/commands/install.py
-pip/commands/search.py
-pip/commands/uninstall.py
-pip/commands/unzip.py
-pip/commands/zip.py
-pip/vcs/__init__.py
-pip/vcs/bazaar.py
-pip/vcs/git.py
-pip/vcs/mercurial.py
-pip/vcs/subversion.py \ No newline at end of file
diff --git a/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/EGG-INFO/dependency_links.txt b/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/EGG-INFO/dependency_links.txt
deleted file mode 100644
index 8b137891..00000000
--- a/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/EGG-INFO/dependency_links.txt
+++ /dev/null
@@ -1 +0,0 @@
-
diff --git a/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/EGG-INFO/entry_points.txt b/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/EGG-INFO/entry_points.txt
deleted file mode 100644
index 5f7b7cf9..00000000
--- a/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/EGG-INFO/entry_points.txt
+++ /dev/null
@@ -1,4 +0,0 @@
-[console_scripts]
-pip = pip:main
-pip-2.7 = pip:main
-
diff --git a/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/EGG-INFO/not-zip-safe b/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/EGG-INFO/not-zip-safe
deleted file mode 100644
index 8b137891..00000000
--- a/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/EGG-INFO/not-zip-safe
+++ /dev/null
@@ -1 +0,0 @@
-
diff --git a/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/EGG-INFO/top_level.txt b/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/EGG-INFO/top_level.txt
deleted file mode 100644
index a1b589e3..00000000
--- a/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/EGG-INFO/top_level.txt
+++ /dev/null
@@ -1 +0,0 @@
-pip
diff --git a/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/__init__.py b/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/__init__.py
deleted file mode 100755
index c5de5c9a..00000000
--- a/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/__init__.py
+++ /dev/null
@@ -1,261 +0,0 @@
-#!/usr/bin/env python
-import os
-import optparse
-
-import subprocess
-import sys
-import re
-import difflib
-
-from pip.basecommand import command_dict, load_command, load_all_commands, command_names
-from pip.baseparser import parser
-from pip.exceptions import InstallationError
-from pip.log import logger
-from pip.util import get_installed_distributions
-from pip.backwardcompat import walk_packages
-
-
-def autocomplete():
- """Command and option completion for the main option parser (and options)
- and its subcommands (and options).
-
- Enable by sourcing one of the completion shell scripts (bash or zsh).
- """
- # Don't complete if user hasn't sourced bash_completion file.
- if 'PIP_AUTO_COMPLETE' not in os.environ:
- return
- cwords = os.environ['COMP_WORDS'].split()[1:]
- cword = int(os.environ['COMP_CWORD'])
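- # COMP_WORDS is the full command line split on whitespace (the program
- # name is dropped above); COMP_CWORD is the index of the word the
- # cursor is on, so cwords[cword-1] is the token being completed.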
- try:
- current = cwords[cword-1]
- except IndexError:
- current = ''
- load_all_commands()
- subcommands = [cmd for cmd, cls in command_dict.items() if not cls.hidden]
- options = []
- # subcommand
- try:
- subcommand_name = [w for w in cwords if w in subcommands][0]
- except IndexError:
- subcommand_name = None
- # subcommand options
- if subcommand_name:
- # special case: 'help' subcommand has no options
- if subcommand_name == 'help':
- sys.exit(1)
- # special case: list locally installed dists for uninstall command
- if subcommand_name == 'uninstall' and not current.startswith('-'):
- installed = []
- lc = current.lower()
- for dist in get_installed_distributions(local_only=True):
- if dist.key.startswith(lc) and dist.key not in cwords[1:]:
- installed.append(dist.key)
- # if there are no dists installed, fall back to option completion
- if installed:
- for dist in installed:
- print dist
- sys.exit(1)
- subcommand = command_dict.get(subcommand_name)
- options += [(opt.get_opt_string(), opt.nargs)
- for opt in subcommand.parser.option_list
- if opt.help != optparse.SUPPRESS_HELP]
- # filter out previously specified options from available options
- prev_opts = [x.split('=')[0] for x in cwords[1:cword-1]]
- options = filter(lambda (x, v): x not in prev_opts, options)
- # filter options by current input
- options = [(k, v) for k, v in options if k.startswith(current)]
- for option in options:
- opt_label = option[0]
- # append '=' to options which require args
- if option[1]:
- opt_label += '='
- print opt_label
- else:
- # show options of main parser only when necessary
- if current.startswith('-') or current.startswith('--'):
- subcommands += [opt.get_opt_string()
- for opt in parser.option_list
- if opt.help != optparse.SUPPRESS_HELP]
- print ' '.join(filter(lambda x: x.startswith(current), subcommands))
- sys.exit(1)
-
-
-def version_control():
- # Import all the version control support modules:
- from pip import vcs
- for importer, modname, ispkg in \
- walk_packages(path=vcs.__path__, prefix=vcs.__name__+'.'):
- __import__(modname)
-
-
-def main(initial_args=None):
- if initial_args is None:
- initial_args = sys.argv[1:]
- autocomplete()
- version_control()
- options, args = parser.parse_args(initial_args)
- if options.help and not args:
- args = ['help']
- if not args:
- parser.error('You must give a command (use "pip help" to see a list of commands)')
- command = args[0].lower()
- load_command(command)
- if command not in command_dict:
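- # Unknown command: suggest the closest registered command, or fall
- # back to guessing 'install <arg>'.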
- close_commands = difflib.get_close_matches(command, command_names())
- if close_commands:
- guess = close_commands[0]
- if args[1:]:
- guess = "%s %s" % (guess, " ".join(args[1:]))
- else:
- guess = 'install %s' % command
- error_dict = {'arg': command, 'guess': guess,
- 'script': os.path.basename(sys.argv[0])}
- parser.error('No command by the name %(script)s %(arg)s\n '
- '(maybe you meant "%(script)s %(guess)s")' % error_dict)
- command = command_dict[command]
- return command.main(initial_args, args[1:], options)
-
-
-############################################################
-## Writing freeze files
-
-
-class FrozenRequirement(object):
-
- def __init__(self, name, req, editable, comments=()):
- self.name = name
- self.req = req
- self.editable = editable
- self.comments = comments
-
- _rev_re = re.compile(r'-r(\d+)$')
- _date_re = re.compile(r'-(20\d\d\d\d\d\d)$')
-
- @classmethod
- def from_dist(cls, dist, dependency_links, find_tags=False):
- location = os.path.normcase(os.path.abspath(dist.location))
- comments = []
- from pip.vcs import vcs, get_src_requirement
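- # A checkout under version control freezes as an editable (-e)
- # requirement pointing at the repository; anything else is pinned
- # to its exact installed version.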
- if vcs.get_backend_name(location):
- editable = True
- req = get_src_requirement(dist, location, find_tags)
- if req is None:
- logger.warn('Could not determine repository location of %s' % location)
- comments.append('## !! Could not determine repository location')
- req = dist.as_requirement()
- editable = False
- else:
- editable = False
- req = dist.as_requirement()
- specs = req.specs
- assert len(specs) == 1 and specs[0][0] == '=='
- version = specs[0][1]
- ver_match = cls._rev_re.search(version)
- date_match = cls._date_re.search(version)
- if ver_match or date_match:
- svn_backend = vcs.get_backend('svn')
- if svn_backend:
- svn_location = svn_backend(
- ).get_location(dist, dependency_links)
- if not svn_location:
- logger.warn(
- 'Warning: cannot find svn location for %s' % req)
- comments.append('## FIXME: could not find svn URL in dependency_links for this package:')
- else:
- comments.append('# Installing as editable to satisfy requirement %s:' % req)
- if ver_match:
- rev = ver_match.group(1)
- else:
- rev = '{%s}' % date_match.group(1)
- editable = True
- req = '%s@%s#egg=%s' % (svn_location, rev, cls.egg_name(dist))
- return cls(dist.project_name, req, editable, comments)
-
- @staticmethod
- def egg_name(dist):
- name = dist.egg_name()
- match = re.search(r'-py\d\.\d$', name)
- if match:
- name = name[:match.start()]
- return name
-
- def __str__(self):
- req = self.req
- if self.editable:
- req = '-e %s' % req
- return '\n'.join(list(self.comments)+[str(req)])+'\n'
-
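-# Illustrative sketch (not part of the original source): how a
-# FrozenRequirement renders into a freeze file. The names and the svn URL
-# below are hypothetical.
-#
-#   plain = FrozenRequirement('example', 'example==1.0', editable=False)
-#   str(plain)   # -> 'example==1.0\n'
-#
-#   vcs = FrozenRequirement('example',
-#                           'svn+http://host/svn/example@42#egg=example',
-#                           editable=True, comments=['# from dependency_links'])
-#   str(vcs)     # -> '# from dependency_links\n-e svn+http://host/...\n'
-#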
-############################################################
-## Requirement files
-
-
-def call_subprocess(cmd, show_stdout=True,
- filter_stdout=None, cwd=None,
- raise_on_returncode=True,
- command_level=logger.DEBUG, command_desc=None,
- extra_environ=None):
- if command_desc is None:
- cmd_parts = []
- for part in cmd:
- if ' ' in part or '\n' in part or '"' in part or "'" in part:
- part = '"%s"' % part.replace('"', '\\"')
- cmd_parts.append(part)
- command_desc = ' '.join(cmd_parts)
- if show_stdout:
- stdout = None
- else:
- stdout = subprocess.PIPE
- logger.log(command_level, "Running command %s" % command_desc)
- env = os.environ.copy()
- if extra_environ:
- env.update(extra_environ)
- try:
- proc = subprocess.Popen(
- cmd, stderr=subprocess.STDOUT, stdin=None, stdout=stdout,
- cwd=cwd, env=env)
- except Exception, e:
- logger.fatal(
- "Error %s while executing command %s" % (e, command_desc))
- raise
- all_output = []
- if stdout is not None:
- stdout = proc.stdout
- while 1:
- line = stdout.readline()
- if not line:
- break
- line = line.rstrip()
- all_output.append(line + '\n')
- if filter_stdout:
- level = filter_stdout(line)
- if isinstance(level, tuple):
- level, line = level
- logger.log(level, line)
- if not logger.stdout_level_matches(level):
- logger.show_progress()
- else:
- logger.info(line)
- else:
- returned_stdout, returned_stderr = proc.communicate()
- all_output = [returned_stdout or '']
- proc.wait()
- if proc.returncode:
- if raise_on_returncode:
- if all_output:
- logger.notify('Complete output from command %s:' % command_desc)
- logger.notify('\n'.join(all_output) + '\n----------------------------------------')
- raise InstallationError(
- "Command %s failed with error code %s"
- % (command_desc, proc.returncode))
- else:
- logger.warn(
- "Command %s had error code %s"
- % (command_desc, proc.returncode))
- if stdout is not None:
- return ''.join(all_output)
-
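-# Hedged usage sketch (not part of the original module): with
-# show_stdout=False the helper captures the command's combined output and
-# returns it; the command shown is only an illustrative example.
-#
-#   output = call_subprocess(['python', '--version'], show_stdout=False)
-#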
-
-if __name__ == '__main__':
- exit = main()
- if exit:
- sys.exit(exit)
diff --git a/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/_pkgutil.py b/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/_pkgutil.py
deleted file mode 100755
index f8fb8aa6..00000000
--- a/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/_pkgutil.py
+++ /dev/null
@@ -1,589 +0,0 @@
-"""Utilities to support packages."""
-
-# NOTE: This module must remain compatible with Python 2.3, as it is shared
-# by setuptools for distribution with Python 2.3 and up.
-
-import os
-import sys
-import imp
-import os.path
-from types import ModuleType
-
-__all__ = [
- 'get_importer', 'iter_importers', 'get_loader', 'find_loader',
- 'walk_packages', 'iter_modules',
- 'ImpImporter', 'ImpLoader', 'read_code', 'extend_path',
-]
-
-
-def read_code(stream):
- # This helper is needed in order for the PEP 302 emulation to
- # correctly handle compiled files
- import marshal
-
- magic = stream.read(4)
- if magic != imp.get_magic():
- return None
-
- stream.read(4) # Skip timestamp
- return marshal.load(stream)
-
-
-def simplegeneric(func):
- """Make a trivial single-dispatch generic function"""
- registry = {}
-
- def wrapper(*args, **kw):
- ob = args[0]
- try:
- cls = ob.__class__
- except AttributeError:
- cls = type(ob)
- try:
- mro = cls.__mro__
- except AttributeError:
- try:
-
- class cls(cls, object):
- pass
-
- mro = cls.__mro__[1:]
- except TypeError:
- mro = object, # must be an ExtensionClass or some such :(
- for t in mro:
- if t in registry:
- return registry[t](*args, **kw)
- else:
- return func(*args, **kw)
- try:
- wrapper.__name__ = func.__name__
- except (TypeError, AttributeError):
- pass # Python 2.3 doesn't allow functions to be renamed
-
- def register(typ, func=None):
- if func is None:
- return lambda f: register(typ, f)
- registry[typ] = func
- return func
-
- wrapper.__dict__ = func.__dict__
- wrapper.__doc__ = func.__doc__
- wrapper.register = register
- return wrapper
-
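-# Hypothetical usage sketch (not in the original file): dispatch on the type
-# of the first argument, the same way iter_importer_modules is extended for
-# zipimporter further below.
-#
-#   @simplegeneric
-#   def describe(ob):
-#       return 'object'
-#
-#   describe.register(list, lambda ob: 'list of %d items' % len(ob))
-#   describe([1, 2, 3])   # -> 'list of 3 items'
-#   describe(42)          # -> 'object'
-#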
-
-def walk_packages(path=None, prefix='', onerror=None):
- """Yields (module_loader, name, ispkg) for all modules recursively
- on path, or, if path is None, all accessible modules.
-
- 'path' should be either None or a list of paths to look for
- modules in.
-
- 'prefix' is a string to output on the front of every module name
- on output.
-
- Note that this function must import all *packages* (NOT all
- modules!) on the given path, in order to access the __path__
- attribute to find submodules.
-
- 'onerror' is a function which gets called with one argument (the
- name of the package which was being imported) if any exception
- occurs while trying to import a package. If no onerror function is
- supplied, ImportErrors are caught and ignored, while all other
- exceptions are propagated, terminating the search.
-
- Examples:
-
- # list all modules python can access
- walk_packages()
-
- # list all submodules of ctypes
- walk_packages(ctypes.__path__, ctypes.__name__+'.')
- """
-
- def seen(p, m={}):
- if p in m:
- return True
- m[p] = True
-
- for importer, name, ispkg in iter_modules(path, prefix):
- yield importer, name, ispkg
-
- if ispkg:
- try:
- __import__(name)
- except ImportError:
- if onerror is not None:
- onerror(name)
- except Exception:
- if onerror is not None:
- onerror(name)
- else:
- raise
- else:
- path = getattr(sys.modules[name], '__path__', None) or []
-
- # don't traverse path items we've seen before
- path = [p for p in path if not seen(p)]
-
- for item in walk_packages(path, name+'.', onerror):
- yield item
-
-
-def iter_modules(path=None, prefix=''):
- """Yields (module_loader, name, ispkg) for all submodules on path,
- or, if path is None, all top-level modules on sys.path.
-
- 'path' should be either None or a list of paths to look for
- modules in.
-
- 'prefix' is a string to output on the front of every module name
- on output.
- """
-
- if path is None:
- importers = iter_importers()
- else:
- importers = map(get_importer, path)
-
- yielded = {}
- for i in importers:
- for name, ispkg in iter_importer_modules(i, prefix):
- if name not in yielded:
- yielded[name] = 1
- yield i, name, ispkg
-
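-# Illustrative sketch (not in the original source): list the top-level
-# modules visible on sys.path without importing them.
-#
-#   for loader, name, ispkg in iter_modules():
-#       print name, ispkg
-#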
-
-#@simplegeneric
-def iter_importer_modules(importer, prefix=''):
- if not hasattr(importer, 'iter_modules'):
- return []
- return importer.iter_modules(prefix)
-
-iter_importer_modules = simplegeneric(iter_importer_modules)
-
-
-class ImpImporter:
- """PEP 302 Importer that wraps Python's "classic" import algorithm
-
- ImpImporter(dirname) produces a PEP 302 importer that searches that
- directory. ImpImporter(None) produces a PEP 302 importer that searches
- the current sys.path, plus any modules that are frozen or built-in.
-
- Note that ImpImporter does not currently support being used by placement
- on sys.meta_path.
- """
-
- def __init__(self, path=None):
- self.path = path
-
- def find_module(self, fullname, path=None):
- # Note: we ignore 'path' argument since it is only used via meta_path
- subname = fullname.split(".")[-1]
- if subname != fullname and self.path is None:
- return None
- if self.path is None:
- path = None
- else:
- path = [os.path.realpath(self.path)]
- try:
- file, filename, etc = imp.find_module(subname, path)
- except ImportError:
- return None
- return ImpLoader(fullname, file, filename, etc)
-
- def iter_modules(self, prefix=''):
- if self.path is None or not os.path.isdir(self.path):
- return
-
- yielded = {}
- import inspect
-
- filenames = os.listdir(self.path)
- filenames.sort() # handle packages before same-named modules
-
- for fn in filenames:
- modname = inspect.getmodulename(fn)
- if modname=='__init__' or modname in yielded:
- continue
-
- path = os.path.join(self.path, fn)
- ispkg = False
-
- if not modname and os.path.isdir(path) and '.' not in fn:
- modname = fn
- for fn in os.listdir(path):
- subname = inspect.getmodulename(fn)
- if subname=='__init__':
- ispkg = True
- break
- else:
- continue # not a package
-
- if modname and '.' not in modname:
- yielded[modname] = 1
- yield prefix + modname, ispkg
-
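-# Illustrative sketch (not in the original source): locate and load a
-# standard-library module through the classic-import wrapper.
-#
-#   loader = ImpImporter().find_module('os')
-#   mod = loader.load_module('os')
-#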
-
-class ImpLoader:
- """PEP 302 Loader that wraps Python's "classic" import algorithm
- """
- code = source = None
-
- def __init__(self, fullname, file, filename, etc):
- self.file = file
- self.filename = filename
- self.fullname = fullname
- self.etc = etc
-
- def load_module(self, fullname):
- self._reopen()
- try:
- mod = imp.load_module(fullname, self.file, self.filename, self.etc)
- finally:
- if self.file:
- self.file.close()
- # Note: we don't set __loader__ because we want the module to look
- # normal; i.e. this is just a wrapper for standard import machinery
- return mod
-
- def get_data(self, pathname):
- return open(pathname, "rb").read()
-
- def _reopen(self):
- if self.file and self.file.closed:
- mod_type = self.etc[2]
- if mod_type==imp.PY_SOURCE:
- self.file = open(self.filename, 'rU')
- elif mod_type in (imp.PY_COMPILED, imp.C_EXTENSION):
- self.file = open(self.filename, 'rb')
-
- def _fix_name(self, fullname):
- if fullname is None:
- fullname = self.fullname
- elif fullname != self.fullname:
- raise ImportError("Loader for module %s cannot handle "
- "module %s" % (self.fullname, fullname))
- return fullname
-
- def is_package(self, fullname):
- fullname = self._fix_name(fullname)
- return self.etc[2]==imp.PKG_DIRECTORY
-
- def get_code(self, fullname=None):
- fullname = self._fix_name(fullname)
- if self.code is None:
- mod_type = self.etc[2]
- if mod_type==imp.PY_SOURCE:
- source = self.get_source(fullname)
- self.code = compile(source, self.filename, 'exec')
- elif mod_type==imp.PY_COMPILED:
- self._reopen()
- try:
- self.code = read_code(self.file)
- finally:
- self.file.close()
- elif mod_type==imp.PKG_DIRECTORY:
- self.code = self._get_delegate().get_code()
- return self.code
-
- def get_source(self, fullname=None):
- fullname = self._fix_name(fullname)
- if self.source is None:
- mod_type = self.etc[2]
- if mod_type==imp.PY_SOURCE:
- self._reopen()
- try:
- self.source = self.file.read()
- finally:
- self.file.close()
- elif mod_type==imp.PY_COMPILED:
- if os.path.exists(self.filename[:-1]):
- f = open(self.filename[:-1], 'rU')
- self.source = f.read()
- f.close()
- elif mod_type==imp.PKG_DIRECTORY:
- self.source = self._get_delegate().get_source()
- return self.source
-
- def _get_delegate(self):
- return ImpImporter(self.filename).find_module('__init__')
-
- def get_filename(self, fullname=None):
- fullname = self._fix_name(fullname)
- mod_type = self.etc[2]
- if self.etc[2]==imp.PKG_DIRECTORY:
- return self._get_delegate().get_filename()
- elif self.etc[2] in (imp.PY_SOURCE, imp.PY_COMPILED, imp.C_EXTENSION):
- return self.filename
- return None
-
-
-try:
- import zipimport
- from zipimport import zipimporter
-
- def iter_zipimport_modules(importer, prefix=''):
- dirlist = zipimport._zip_directory_cache[importer.archive].keys()
- dirlist.sort()
- _prefix = importer.prefix
- plen = len(_prefix)
- yielded = {}
- import inspect
- for fn in dirlist:
- if not fn.startswith(_prefix):
- continue
-
- fn = fn[plen:].split(os.sep)
-
- if len(fn)==2 and fn[1].startswith('__init__.py'):
- if fn[0] not in yielded:
- yielded[fn[0]] = 1
- yield fn[0], True
-
- if len(fn)!=1:
- continue
-
- modname = inspect.getmodulename(fn[0])
- if modname=='__init__':
- continue
-
- if modname and '.' not in modname and modname not in yielded:
- yielded[modname] = 1
- yield prefix + modname, False
-
- iter_importer_modules.register(zipimporter, iter_zipimport_modules)
-
-except ImportError:
- pass
-
-
-def get_importer(path_item):
- """Retrieve a PEP 302 importer for the given path item
-
- The returned importer is cached in sys.path_importer_cache
- if it was newly created by a path hook.
-
- If there is no importer, a wrapper around the basic import
- machinery is returned. This wrapper is never inserted into
- the importer cache (None is inserted instead).
-
- The cache (or part of it) can be cleared manually if a
- rescan of sys.path_hooks is necessary.
- """
- try:
- importer = sys.path_importer_cache[path_item]
- except KeyError:
- for path_hook in sys.path_hooks:
- try:
- importer = path_hook(path_item)
- break
- except ImportError:
- pass
- else:
- importer = None
- sys.path_importer_cache.setdefault(path_item, importer)
-
- if importer is None:
- try:
- importer = ImpImporter(path_item)
- except ImportError:
- importer = None
- return importer
-
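-# Illustrative sketch (not in the original source): plain directories have no
-# path hook in stock Python 2, so get_importer falls back to an ImpImporter
-# (and caches None in sys.path_importer_cache).
-#
-#   importer = get_importer('.')   # ImpImporter wrapping the current directory
-#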
-
-def iter_importers(fullname=""):
- """Yield PEP 302 importers for the given module name
-
- If fullname contains a '.', the importers will be for the package
- containing fullname, otherwise they will be importers for sys.meta_path,
- sys.path, and Python's "classic" import machinery, in that order. If
- the named module is in a package, that package is imported as a side
- effect of invoking this function.
-
- Non PEP 302 mechanisms (e.g. the Windows registry) used by the
- standard import machinery to find files in alternative locations
- are partially supported, but are searched AFTER sys.path. Normally,
- these locations are searched BEFORE sys.path, preventing sys.path
- entries from shadowing them.
-
- For this to cause a visible difference in behaviour, there must
- be a module or package name that is accessible via both sys.path
- and one of the non PEP 302 file system mechanisms. In this case,
- the emulation will find the former version, while the builtin
- import mechanism will find the latter.
-
- Items of the following types can be affected by this discrepancy:
- imp.C_EXTENSION, imp.PY_SOURCE, imp.PY_COMPILED, imp.PKG_DIRECTORY
- """
- if fullname.startswith('.'):
- raise ImportError("Relative module names not supported")
- if '.' in fullname:
- # Get the containing package's __path__
- pkg = '.'.join(fullname.split('.')[:-1])
- if pkg not in sys.modules:
- __import__(pkg)
- path = getattr(sys.modules[pkg], '__path__', None) or []
- else:
- for importer in sys.meta_path:
- yield importer
- path = sys.path
- for item in path:
- yield get_importer(item)
- if '.' not in fullname:
- yield ImpImporter()
-
-
-def get_loader(module_or_name):
- """Get a PEP 302 "loader" object for module_or_name
-
- If the module or package is accessible via the normal import
- mechanism, a wrapper around the relevant part of that machinery
- is returned. Returns None if the module cannot be found or imported.
- If the named module is not already imported, its containing package
- (if any) is imported, in order to establish the package __path__.
-
- This function uses iter_importers(), and is thus subject to the same
- limitations regarding platform-specific special import locations such
- as the Windows registry.
- """
- if module_or_name in sys.modules:
- module_or_name = sys.modules[module_or_name]
- if isinstance(module_or_name, ModuleType):
- module = module_or_name
- loader = getattr(module, '__loader__', None)
- if loader is not None:
- return loader
- fullname = module.__name__
- else:
- fullname = module_or_name
- return find_loader(fullname)
-
-
-def find_loader(fullname):
- """Find a PEP 302 "loader" object for fullname
-
- If fullname contains dots, path must be the containing package's __path__.
- Returns None if the module cannot be found or imported. This function uses
- iter_importers(), and is thus subject to the same limitations regarding
- platform-specific special import locations such as the Windows registry.
- """
- for importer in iter_importers(fullname):
- loader = importer.find_module(fullname)
- if loader is not None:
- return loader
-
- return None
-
-
-def extend_path(path, name):
- """Extend a package's path.
-
- Intended use is to place the following code in a package's __init__.py:
-
- from pkgutil import extend_path
- __path__ = extend_path(__path__, __name__)
-
- This will add to the package's __path__ all subdirectories of
- directories on sys.path named after the package. This is useful
- if one wants to distribute different parts of a single logical
- package as multiple directories.
-
- It also looks for *.pkg files beginning where * matches the name
- argument. This feature is similar to *.pth files (see site.py),
- except that it doesn't special-case lines starting with 'import'.
- A *.pkg file is trusted at face value: apart from checking for
- duplicates, all entries found in a *.pkg file are added to the
- path, regardless of whether they exist on the filesystem. (This
- is a feature.)
-
- If the input path is not a list (as is the case for frozen
- packages) it is returned unchanged. The input path is not
- modified; an extended copy is returned. Items are only appended
- to the copy at the end.
-
- It is assumed that sys.path is a sequence. Items of sys.path that
- are not (unicode or 8-bit) strings referring to existing
- directories are ignored. Unicode items of sys.path that cause
- errors when used as filenames may cause this function to raise an
- exception (in line with os.path.isdir() behavior).
- """
-
- if not isinstance(path, list):
- # This could happen e.g. when this is called from inside a
- # frozen package. Return the path unchanged in that case.
- return path
-
- pname = os.path.join(*name.split('.')) # Reconstitute as relative path
- # Just in case os.extsep != '.'
- sname = os.extsep.join(name.split('.'))
- sname_pkg = sname + os.extsep + "pkg"
- init_py = "__init__" + os.extsep + "py"
-
- path = path[:] # Start with a copy of the existing path
-
- for dir in sys.path:
- if not isinstance(dir, basestring) or not os.path.isdir(dir):
- continue
- subdir = os.path.join(dir, pname)
- # XXX This may still add duplicate entries to path on
- # case-insensitive filesystems
- initfile = os.path.join(subdir, init_py)
- if subdir not in path and os.path.isfile(initfile):
- path.append(subdir)
- # XXX Is this the right thing for subpackages like zope.app?
- # It looks for a file named "zope.app.pkg"
- pkgfile = os.path.join(dir, sname_pkg)
- if os.path.isfile(pkgfile):
- try:
- f = open(pkgfile)
- except IOError, msg:
- sys.stderr.write("Can't open %s: %s\n" %
- (pkgfile, msg))
- else:
- for line in f:
- line = line.rstrip('\n')
- if not line or line.startswith('#'):
- continue
- path.append(line) # Don't check for existence!
- f.close()
-
- return path
-
-
-def get_data(package, resource):
- """Get a resource from a package.
-
- This is a wrapper round the PEP 302 loader get_data API. The package
- argument should be the name of a package, in standard module format
- (foo.bar). The resource argument should be in the form of a relative
- filename, using '/' as the path separator. The parent directory name '..'
- is not allowed, and nor is a rooted name (starting with a '/').
-
- The function returns a binary string, which is the contents of the
- specified resource.
-
- For packages located in the filesystem, which have already been imported,
- this is the rough equivalent of
-
- d = os.path.dirname(sys.modules[package].__file__)
- data = open(os.path.join(d, resource), 'rb').read()
-
- If the package cannot be located or loaded, or it uses a PEP 302 loader
- which does not support get_data(), then None is returned.
- """
-
- loader = get_loader(package)
- if loader is None or not hasattr(loader, 'get_data'):
- return None
- mod = sys.modules.get(package) or loader.load_module(package)
- if mod is None or not hasattr(mod, '__file__'):
- return None
-
- # Modify the resource name to be compatible with the loader.get_data
- # signature - an os.path format "filename" starting with the dirname of
- # the package's __file__
- parts = resource.split('/')
- parts.insert(0, os.path.dirname(mod.__file__))
- resource_name = os.path.join(*parts)
- return loader.get_data(resource_name)
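-
-# Hedged usage sketch (not in the original file); 'foo' and 'data/config.txt'
-# are hypothetical names for a package and a bundled resource.
-#
-#   contents = get_data('foo', 'data/config.txt')   # binary string, or None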
diff --git a/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/backwardcompat.py b/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/backwardcompat.py
deleted file mode 100755
index e7c11f1d..00000000
--- a/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/backwardcompat.py
+++ /dev/null
@@ -1,55 +0,0 @@
-"""Stuff that isn't in some old versions of Python"""
-
-import sys
-import os
-import shutil
-
-__all__ = ['any', 'WindowsError', 'md5', 'copytree']
-
-try:
- WindowsError = WindowsError
-except NameError:
- WindowsError = None
-try:
- from hashlib import md5
-except ImportError:
- import md5 as md5_module
- md5 = md5_module.new
-
-try:
- from pkgutil import walk_packages
-except ImportError:
- # let's fall back as long as we can
- from _pkgutil import walk_packages
-
-try:
- any = any
-except NameError:
-
- def any(seq):
- for item in seq:
- if item:
- return True
- return False
-
-
-def copytree(src, dst):
- if sys.version_info < (2, 5):
- before_last_dir = os.path.dirname(dst)
- if not os.path.exists(before_last_dir):
- os.makedirs(before_last_dir)
- shutil.copytree(src, dst)
- shutil.copymode(src, dst)
- else:
- shutil.copytree(src, dst)
-
-
-def product(*args, **kwds):
- # product('ABCD', 'xy') --> Ax Ay Bx By Cx Cy Dx Dy
- # product(range(2), repeat=3) --> 000 001 010 011 100 101 110 111
- pools = map(tuple, args) * kwds.get('repeat', 1)
- result = [[]]
- for pool in pools:
- result = [x+[y] for x in result for y in pool]
- for prod in result:
- yield tuple(prod)
diff --git a/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/basecommand.py b/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/basecommand.py
deleted file mode 100755
index f450e839..00000000
--- a/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/basecommand.py
+++ /dev/null
@@ -1,203 +0,0 @@
-"""Base Command class, and related routines"""
-
-from cStringIO import StringIO
-import getpass
-import os
-import socket
-import sys
-import traceback
-import time
-import urllib
-import urllib2
-
-from pip import commands
-from pip.log import logger
-from pip.baseparser import parser, ConfigOptionParser, UpdatingDefaultsHelpFormatter
-from pip.download import urlopen
-from pip.exceptions import BadCommand, InstallationError, UninstallationError
-from pip.venv import restart_in_venv
-from pip.backwardcompat import walk_packages
-
-__all__ = ['command_dict', 'Command', 'load_all_commands',
- 'load_command', 'command_names']
-
-command_dict = {}
-
- # for backwards compatibility
-get_proxy = urlopen.get_proxy
-
-
-class Command(object):
- name = None
- usage = None
- hidden = False
-
- def __init__(self):
- assert self.name
- self.parser = ConfigOptionParser(
- usage=self.usage,
- prog='%s %s' % (sys.argv[0], self.name),
- version=parser.version,
- formatter=UpdatingDefaultsHelpFormatter(),
- name=self.name)
- for option in parser.option_list:
- if not option.dest or option.dest == 'help':
- # -h, --version, etc
- continue
- self.parser.add_option(option)
- command_dict[self.name] = self
-
- def merge_options(self, initial_options, options):
- # Make sure we have all global options carried over
- for attr in ['log', 'venv', 'proxy', 'venv_base', 'require_venv',
- 'respect_venv', 'log_explicit_levels', 'log_file',
- 'timeout', 'default_vcs', 'skip_requirements_regex',
- 'no_input']:
- setattr(options, attr, getattr(initial_options, attr) or getattr(options, attr))
- options.quiet += initial_options.quiet
- options.verbose += initial_options.verbose
-
- def setup_logging(self):
- pass
-
- def main(self, complete_args, args, initial_options):
- options, args = self.parser.parse_args(args)
- self.merge_options(initial_options, options)
-
- level = 1 # Notify
- level += options.verbose
- level -= options.quiet
- level = logger.level_for_integer(4-level)
- complete_log = []
- logger.consumers.extend(
- [(level, sys.stdout),
- (logger.DEBUG, complete_log.append)])
- if options.log_explicit_levels:
- logger.explicit_levels = True
-
- self.setup_logging()
-
- if options.require_venv and not options.venv:
- # If a venv is required check if it can really be found
- if not os.environ.get('VIRTUAL_ENV'):
- logger.fatal('Could not find an activated virtualenv (required).')
- sys.exit(3)
- # Automatically install in currently activated venv if required
- options.respect_venv = True
-
- if args and args[-1] == '___VENV_RESTART___':
- ## FIXME: We don't do anything with this value yet:
- args = args[:-2]
- options.venv = None
- else:
- # If given the option to respect the activated environment
- # check if no venv is given as a command line parameter
- if options.respect_venv and os.environ.get('VIRTUAL_ENV'):
- if options.venv and os.path.exists(options.venv):
- # Make sure the command-line venv and the environment variable match
- if (os.path.realpath(os.path.expanduser(options.venv)) !=
- os.path.realpath(os.environ.get('VIRTUAL_ENV'))):
- logger.fatal("Given virtualenv (%s) doesn't match "
- "currently activated virtualenv (%s)."
- % (options.venv, os.environ.get('VIRTUAL_ENV')))
- sys.exit(3)
- else:
- options.venv = os.environ.get('VIRTUAL_ENV')
- logger.info('Using already activated environment %s' % options.venv)
- if options.venv:
- logger.info('Running in environment %s' % options.venv)
- site_packages = False
- if options.site_packages:
- site_packages = True
- restart_in_venv(options.venv, options.venv_base, site_packages,
- complete_args)
- # restart_in_venv should actually never return, but for clarity...
- return
-
- ## FIXME: not sure if this should come before or after venv restart
- if options.log:
- log_fp = open_logfile(options.log, 'a')
- logger.consumers.append((logger.DEBUG, log_fp))
- else:
- log_fp = None
-
- socket.setdefaulttimeout(options.timeout or None)
-
- urlopen.setup(proxystr=options.proxy, prompting=not options.no_input)
-
- exit = 0
- try:
- self.run(options, args)
- except (InstallationError, UninstallationError), e:
- logger.fatal(str(e))
- logger.info('Exception information:\n%s' % format_exc())
- exit = 1
- except BadCommand, e:
- logger.fatal(str(e))
- logger.info('Exception information:\n%s' % format_exc())
- exit = 1
- except:
- logger.fatal('Exception:\n%s' % format_exc())
- exit = 2
-
- if log_fp is not None:
- log_fp.close()
- if exit:
- log_fn = options.log_file
- text = '\n'.join(complete_log)
- logger.fatal('Storing complete log in %s' % log_fn)
- log_fp = open_logfile(log_fn, 'w')
- log_fp.write(text)
- log_fp.close()
- return exit
-
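-# Hypothetical subclass sketch (not in the original file): commands register
-# themselves in command_dict simply by being instantiated, which is how the
-# bundled command modules below hook themselves up.
-#
-#   class PingCommand(Command):
-#       name = 'ping'
-#       usage = '%prog'
-#       summary = 'Reply with pong'
-#
-#       def run(self, options, args):
-#           logger.notify('pong')
-#
-#   PingCommand()   # now available as command_dict['ping']
-#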
-
-
-
-def format_exc(exc_info=None):
- if exc_info is None:
- exc_info = sys.exc_info()
- out = StringIO()
- traceback.print_exception(*exc_info, **dict(file=out))
- return out.getvalue()
-
-
-def open_logfile(filename, mode='a'):
- """Open the named log file in append mode.
-
- If the file already exists, a separator will also be printed to
- the file to separate past activity from current activity.
- """
- filename = os.path.expanduser(filename)
- filename = os.path.abspath(filename)
- dirname = os.path.dirname(filename)
- if not os.path.exists(dirname):
- os.makedirs(dirname)
- exists = os.path.exists(filename)
-
- log_fp = open(filename, mode)
- if exists:
- print >> log_fp, '-'*60
- print >> log_fp, '%s run on %s' % (sys.argv[0], time.strftime('%c'))
- return log_fp
-
-
-def load_command(name):
- full_name = 'pip.commands.%s' % name
- if full_name in sys.modules:
- return
- try:
- __import__(full_name)
- except ImportError:
- pass
-
-
-def load_all_commands():
- for name in command_names():
- load_command(name)
-
-
-def command_names():
- names = set((pkg[1] for pkg in walk_packages(path=commands.__path__)))
- return list(names)
-
diff --git a/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/baseparser.py b/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/baseparser.py
deleted file mode 100755
index a8bd6ce4..00000000
--- a/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/baseparser.py
+++ /dev/null
@@ -1,231 +0,0 @@
-"""Base option parser setup"""
-
-import sys
-import optparse
-import pkg_resources
-import ConfigParser
-import os
-from distutils.util import strtobool
-from pip.locations import default_config_file, default_log_file
-
-
-class UpdatingDefaultsHelpFormatter(optparse.IndentedHelpFormatter):
- """Custom help formatter for use in ConfigOptionParser that updates
- the defaults before expanding them, allowing them to show up correctly
- in the help listing"""
-
- def expand_default(self, option):
- if self.parser is not None:
- self.parser.update_defaults(self.parser.defaults)
- return optparse.IndentedHelpFormatter.expand_default(self, option)
-
-
-class ConfigOptionParser(optparse.OptionParser):
- """Custom option parser which updates its defaults by by checking the
- configuration files and environmental variables"""
-
- def __init__(self, *args, **kwargs):
- self.config = ConfigParser.RawConfigParser()
- self.name = kwargs.pop('name')
- self.files = self.get_config_files()
- self.config.read(self.files)
- assert self.name
- optparse.OptionParser.__init__(self, *args, **kwargs)
-
- def get_config_files(self):
- config_file = os.environ.get('PIP_CONFIG_FILE', False)
- if config_file and os.path.exists(config_file):
- return [config_file]
- return [default_config_file]
-
- def update_defaults(self, defaults):
- """Updates the given defaults with values from the config files and
- the environ. Does a little special handling for certain types of
- options (lists)."""
- # Then go and look for the other sources of configuration:
- config = {}
- # 1. config files
- for section in ('global', self.name):
- config.update(dict(self.get_config_section(section)))
- # 2. environment variables
- config.update(dict(self.get_environ_vars()))
- # Then set the options with those values
- for key, val in config.iteritems():
- key = key.replace('_', '-')
- if not key.startswith('--'):
- key = '--%s' % key # only prefer long opts
- option = self.get_option(key)
- if option is not None:
- # ignore empty values
- if not val:
- continue
- # handle multiline configs
- if option.action == 'append':
- val = val.split()
- else:
- option.nargs = 1
- if option.action in ('store_true', 'store_false', 'count'):
- val = strtobool(val)
- try:
- val = option.convert_value(key, val)
- except optparse.OptionValueError, e:
- print ("An error occured during configuration: %s" % e)
- sys.exit(3)
- defaults[option.dest] = val
- return defaults
-
- def get_config_section(self, name):
- """Get a section of a configuration"""
- if self.config.has_section(name):
- return self.config.items(name)
- return []
-
- def get_environ_vars(self, prefix='PIP_'):
- """Returns a generator with all environmental vars with prefix PIP_"""
- for key, val in os.environ.iteritems():
- if key.startswith(prefix):
- yield (key.replace(prefix, '').lower(), val)
-
- def get_default_values(self):
- """Overridding to make updating the defaults after instantiation of
- the option parser possible, update_defaults() does the dirty work."""
- if not self.process_default_values:
- # Old, pre-Optik 1.5 behaviour.
- return optparse.Values(self.defaults)
-
- defaults = self.update_defaults(self.defaults.copy()) # ours
- for option in self._get_all_options():
- default = defaults.get(option.dest)
- if isinstance(default, basestring):
- opt_str = option.get_opt_string()
- defaults[option.dest] = option.check_value(opt_str, default)
- return optparse.Values(defaults)
-
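-# Illustrative sketch (not in the original source): PIP_-prefixed environment
-# variables feed the defaults via get_environ_vars()/update_defaults(), e.g.
-# for the module-level 'parser' defined below:
-#
-#   os.environ['PIP_TIMEOUT'] = '60'
-#   parser.get_default_values().timeout   # -> 60.0
-#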
-try:
- pip_dist = pkg_resources.get_distribution('pip')
- version = '%s from %s (python %s)' % (
- pip_dist, pip_dist.location, sys.version[:3])
-except pkg_resources.DistributionNotFound:
- # when running pip.py without installing
- version = None
-
-parser = ConfigOptionParser(
- usage='%prog COMMAND [OPTIONS]',
- version=version,
- add_help_option=False,
- formatter=UpdatingDefaultsHelpFormatter(),
- name='global')
-
-parser.add_option(
- '-h', '--help',
- dest='help',
- action='store_true',
- help='Show help')
-parser.add_option(
- '-E', '--environment',
- dest='venv',
- metavar='DIR',
- help='virtualenv environment to run pip in (either give the '
- 'interpreter or the environment base directory)')
-parser.add_option(
- '-s', '--enable-site-packages',
- dest='site_packages',
- action='store_true',
- help='Include site-packages in virtualenv if one is to be '
- 'created. Ignored if --environment is not used or '
- 'the virtualenv already exists.')
-parser.add_option(
- # Defines a default root directory for virtualenvs; relative
- # virtualenv names/paths are resolved against it.
- '--virtualenv-base',
- dest='venv_base',
- type='str',
- default='',
- help=optparse.SUPPRESS_HELP)
-parser.add_option(
- # Run only if inside a virtualenv, bail if not.
- '--require-virtualenv', '--require-venv',
- dest='require_venv',
- action='store_true',
- default=False,
- help=optparse.SUPPRESS_HELP)
-parser.add_option(
- # Automatically use an activated virtualenv instead of installing
- # globally. -E is ignored if this is used.
- '--respect-virtualenv', '--respect-venv',
- dest='respect_venv',
- action='store_true',
- default=False,
- help=optparse.SUPPRESS_HELP)
-
-parser.add_option(
- '-v', '--verbose',
- dest='verbose',
- action='count',
- default=0,
- help='Give more output')
-parser.add_option(
- '-q', '--quiet',
- dest='quiet',
- action='count',
- default=0,
- help='Give less output')
-parser.add_option(
- '--log',
- dest='log',
- metavar='FILENAME',
- help='Log file where a complete (maximum verbosity) record will be kept')
-parser.add_option(
- # Writes the log levels explicitly to the log
- '--log-explicit-levels',
- dest='log_explicit_levels',
- action='store_true',
- default=False,
- help=optparse.SUPPRESS_HELP)
-parser.add_option(
- # The default log file
- '--local-log', '--log-file',
- dest='log_file',
- metavar='FILENAME',
- default=default_log_file,
- help=optparse.SUPPRESS_HELP)
-parser.add_option(
- # Don't ask for input
- '--no-input',
- dest='no_input',
- action='store_true',
- default=False,
- help=optparse.SUPPRESS_HELP)
-
-parser.add_option(
- '--proxy',
- dest='proxy',
- type='str',
- default='',
- help="Specify a proxy in the form user:passwd@proxy.server:port. "
- "Note that the user:password@ is optional and required only if you "
- "are behind an authenticated proxy. If you provide "
- "user@proxy.server:port then you will be prompted for a password.")
-parser.add_option(
- '--timeout', '--default-timeout',
- metavar='SECONDS',
- dest='timeout',
- type='float',
- default=15,
- help='Set the socket timeout (default %default seconds)')
-parser.add_option(
- # The default version control system for editables, e.g. 'svn'
- '--default-vcs',
- dest='default_vcs',
- type='str',
- default='',
- help=optparse.SUPPRESS_HELP)
-parser.add_option(
- # A regex to be used to skip requirements
- '--skip-requirements-regex',
- dest='skip_requirements_regex',
- type='str',
- default='',
- help=optparse.SUPPRESS_HELP)
-
-parser.disable_interspersed_args()
diff --git a/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/commands/__init__.py b/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/commands/__init__.py
deleted file mode 100755
index 792d6005..00000000
--- a/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/commands/__init__.py
+++ /dev/null
@@ -1 +0,0 @@
-#
diff --git a/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/commands/bundle.py b/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/commands/bundle.py
deleted file mode 100755
index fb0f7570..00000000
--- a/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/commands/bundle.py
+++ /dev/null
@@ -1,33 +0,0 @@
-from pip.locations import build_prefix, src_prefix
-from pip.util import display_path, backup_dir
-from pip.log import logger
-from pip.exceptions import InstallationError
-from pip.commands.install import InstallCommand
-
-
-class BundleCommand(InstallCommand):
- name = 'bundle'
- usage = '%prog [OPTIONS] BUNDLE_NAME.pybundle PACKAGE_NAMES...'
- summary = 'Create pybundles (archives containing multiple packages)'
- bundle = True
-
- def __init__(self):
- super(BundleCommand, self).__init__()
-
- def run(self, options, args):
- if not args:
- raise InstallationError('You must give a bundle filename')
- if not options.build_dir:
- options.build_dir = backup_dir(build_prefix, '-bundle')
- if not options.src_dir:
- options.src_dir = backup_dir(src_prefix, '-bundle')
- # We have to get everything when creating a bundle:
- options.ignore_installed = True
- logger.notify('Putting temporary build files in %s and source/develop files in %s'
- % (display_path(options.build_dir), display_path(options.src_dir)))
- self.bundle_filename = args.pop(0)
- requirement_set = super(BundleCommand, self).run(options, args)
- return requirement_set
-
-
-BundleCommand()
diff --git a/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/commands/completion.py b/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/commands/completion.py
deleted file mode 100755
index d003b9ae..00000000
--- a/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/commands/completion.py
+++ /dev/null
@@ -1,60 +0,0 @@
-import sys
-from pip.basecommand import Command
-
-BASE_COMPLETION = """
-# pip %(shell)s completion start%(script)s# pip %(shell)s completion end
-"""
-
-COMPLETION_SCRIPTS = {
- 'bash': """
-_pip_completion()
-{
- COMPREPLY=( $( COMP_WORDS="${COMP_WORDS[*]}" \\
- COMP_CWORD=$COMP_CWORD \\
- PIP_AUTO_COMPLETE=1 $1 ) )
-}
-complete -o default -F _pip_completion pip
-""", 'zsh': """
-function _pip_completion {
- local words cword
- read -Ac words
- read -cn cword
- reply=( $( COMP_WORDS="$words[*]" \\
- COMP_CWORD=$(( cword-1 )) \\
- PIP_AUTO_COMPLETE=1 $words[1] ) )
-}
-compctl -K _pip_completion pip
-"""}
-
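-# Usage sketch (not part of the original file): wiring the emitted script into
-# a shell; the rc file is only an illustrative choice.
-#
-#   $ pip completion --bash >> ~/.bashrc
-#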
-
-class CompletionCommand(Command):
- name = 'completion'
- summary = 'A helper command to be used for command completion'
- hidden = True
-
- def __init__(self):
- super(CompletionCommand, self).__init__()
- self.parser.add_option(
- '--bash', '-b',
- action='store_const',
- const='bash',
- dest='shell',
- help='Emit completion code for bash')
- self.parser.add_option(
- '--zsh', '-z',
- action='store_const',
- const='zsh',
- dest='shell',
- help='Emit completion code for zsh')
-
- def run(self, options, args):
- """Prints the completion code of the given shell"""
- shells = COMPLETION_SCRIPTS.keys()
- shell_options = ['--'+shell for shell in sorted(shells)]
- if options.shell in shells:
- script = COMPLETION_SCRIPTS.get(options.shell, '')
- print BASE_COMPLETION % {'script': script, 'shell': options.shell}
- else:
- sys.stderr.write('ERROR: You must pass %s\n' % ' or '.join(shell_options))
-
-CompletionCommand()
diff --git a/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/commands/freeze.py b/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/commands/freeze.py
deleted file mode 100755
index 01b5df93..00000000
--- a/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/commands/freeze.py
+++ /dev/null
@@ -1,109 +0,0 @@
-import re
-import sys
-import pkg_resources
-import pip
-from pip.req import InstallRequirement
-from pip.log import logger
-from pip.basecommand import Command
-from pip.util import get_installed_distributions
-
-
-class FreezeCommand(Command):
- name = 'freeze'
- usage = '%prog [OPTIONS]'
- summary = 'Output all currently installed packages (exact versions) to stdout'
-
- def __init__(self):
- super(FreezeCommand, self).__init__()
- self.parser.add_option(
- '-r', '--requirement',
- dest='requirement',
- action='store',
- default=None,
- metavar='FILENAME',
- help='Use the given requirements file as a hint about how to generate the new frozen requirements')
- self.parser.add_option(
- '-f', '--find-links',
- dest='find_links',
- action='append',
- default=[],
- metavar='URL',
- help='URL for finding packages, which will be added to the frozen requirements file')
- self.parser.add_option(
- '-l', '--local',
- dest='local',
- action='store_true',
- default=False,
- help='If in a virtualenv, do not report globally-installed packages')
-
- def setup_logging(self):
- logger.move_stdout_to_stderr()
-
- def run(self, options, args):
- requirement = options.requirement
- find_links = options.find_links or []
- local_only = options.local
- ## FIXME: Obviously this should be settable:
- find_tags = False
- skip_match = None
-
- skip_regex = options.skip_requirements_regex
- if skip_regex:
- skip_match = re.compile(skip_regex)
-
- dependency_links = []
-
- f = sys.stdout
-
- for dist in pkg_resources.working_set:
- if dist.has_metadata('dependency_links.txt'):
- dependency_links.extend(dist.get_metadata_lines('dependency_links.txt'))
- for link in find_links:
- if '#egg=' in link:
- dependency_links.append(link)
- for link in find_links:
- f.write('-f %s\n' % link)
- installations = {}
- for dist in get_installed_distributions(local_only=local_only):
- req = pip.FrozenRequirement.from_dist(dist, dependency_links, find_tags=find_tags)
- installations[req.name] = req
- if requirement:
- req_f = open(requirement)
- for line in req_f:
- if not line.strip() or line.strip().startswith('#'):
- f.write(line)
- continue
- if skip_match and skip_match.search(line):
- f.write(line)
- continue
- elif line.startswith('-e') or line.startswith('--editable'):
- if line.startswith('-e'):
- line = line[2:].strip()
- else:
- line = line[len('--editable'):].strip().lstrip('=')
- line_req = InstallRequirement.from_editable(line, default_vcs=options.default_vcs)
- elif (line.startswith('-r') or line.startswith('--requirement')
- or line.startswith('-Z') or line.startswith('--always-unzip')
- or line.startswith('-f') or line.startswith('-i')
- or line.startswith('--extra-index-url')):
- f.write(line)
- continue
- else:
- line_req = InstallRequirement.from_line(line)
- if not line_req.name:
- logger.notify("Skipping line because it's not clear what it would install: %s"
- % line.strip())
- logger.notify(" (add #egg=PackageName to the URL to avoid this warning)")
- continue
- if line_req.name not in installations:
- logger.warn("Requirement file contains %s, but that package is not installed"
- % line.strip())
- continue
- f.write(str(installations[line_req.name]))
- del installations[line_req.name]
- f.write('## The following requirements were added by pip freeze:\n')
- for installation in sorted(installations.values(), key=lambda x: x.name):
- f.write(str(installation))
-
-
-FreezeCommand()
diff --git a/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/commands/help.py b/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/commands/help.py
deleted file mode 100755
index b0b36611..00000000
--- a/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/commands/help.py
+++ /dev/null
@@ -1,32 +0,0 @@
-from pip.basecommand import Command, command_dict, load_all_commands
-from pip.exceptions import InstallationError
-from pip.baseparser import parser
-
-
-class HelpCommand(Command):
- name = 'help'
- usage = '%prog'
- summary = 'Show available commands'
-
- def run(self, options, args):
- load_all_commands()
- if args:
- ## FIXME: handle errors better here
- command = args[0]
- if command not in command_dict:
- raise InstallationError('No command with the name: %s' % command)
- command = command_dict[command]
- command.parser.print_help()
- return
- parser.print_help()
- print
- print 'Commands available:'
- commands = list(set(command_dict.values()))
- commands.sort(key=lambda x: x.name)
- for command in commands:
- if command.hidden:
- continue
- print ' %s: %s' % (command.name, command.summary)
-
-
-HelpCommand()
diff --git a/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/commands/install.py b/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/commands/install.py
deleted file mode 100755
index 861c332b..00000000
--- a/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/commands/install.py
+++ /dev/null
@@ -1,247 +0,0 @@
-import os, sys
-from pip.req import InstallRequirement, RequirementSet
-from pip.req import parse_requirements
-from pip.log import logger
-from pip.locations import build_prefix, src_prefix
-from pip.basecommand import Command
-from pip.index import PackageFinder
-from pip.exceptions import InstallationError
-
-
-class InstallCommand(Command):
- name = 'install'
- usage = '%prog [OPTIONS] PACKAGE_NAMES...'
- summary = 'Install packages'
- bundle = False
-
- def __init__(self):
- super(InstallCommand, self).__init__()
- self.parser.add_option(
- '-e', '--editable',
- dest='editables',
- action='append',
- default=[],
- metavar='VCS+REPOS_URL[@REV]#egg=PACKAGE',
- help='Install a package directly from a checkout. Source will be checked '
- 'out into src/PACKAGE (lower-case) and installed in-place (using '
- 'setup.py develop). You can run this on an existing directory/checkout (like '
- 'pip install -e src/mycheckout). This option may be provided multiple times. '
- 'Possible values for VCS are: svn, git, hg and bzr.')
- self.parser.add_option(
- '-r', '--requirement',
- dest='requirements',
- action='append',
- default=[],
- metavar='FILENAME',
- help='Install all the packages listed in the given requirements file. '
- 'This option can be used multiple times.')
- self.parser.add_option(
- '-f', '--find-links',
- dest='find_links',
- action='append',
- default=[],
- metavar='URL',
- help='URL to look for packages at')
- self.parser.add_option(
- '-i', '--index-url', '--pypi-url',
- dest='index_url',
- metavar='URL',
- default='http://pypi.python.org/simple/',
- help='Base URL of Python Package Index (default %default)')
- self.parser.add_option(
- '--extra-index-url',
- dest='extra_index_urls',
- metavar='URL',
- action='append',
- default=[],
- help='Extra URLs of package indexes to use in addition to --index-url')
- self.parser.add_option(
- '--no-index',
- dest='no_index',
- action='store_true',
- default=False,
- help='Ignore package index (only looking at --find-links URLs instead)')
- self.parser.add_option(
- '-M', '--use-mirrors',
- dest='use_mirrors',
- action='store_true',
- default=False,
- help='Use the PyPI mirrors as a fallback in case the main index is down.')
- self.parser.add_option(
- '--mirrors',
- dest='mirrors',
- metavar='URL',
- action='append',
- default=[],
- help='Specific mirror URLs to query when --use-mirrors is used')
-
- self.parser.add_option(
- '-b', '--build', '--build-dir', '--build-directory',
- dest='build_dir',
- metavar='DIR',
- default=None,
- help='Unpack packages into DIR (default %s) and build from there' % build_prefix)
- self.parser.add_option(
- '-d', '--download', '--download-dir', '--download-directory',
- dest='download_dir',
- metavar='DIR',
- default=None,
- help='Download packages into DIR instead of installing them')
- self.parser.add_option(
- '--download-cache',
- dest='download_cache',
- metavar='DIR',
- default=None,
- help='Cache downloaded packages in DIR')
- self.parser.add_option(
- '--src', '--source', '--source-dir', '--source-directory',
- dest='src_dir',
- metavar='DIR',
- default=None,
- help='Check out --editable packages into DIR (default %s)' % src_prefix)
-
- self.parser.add_option(
- '-U', '--upgrade',
- dest='upgrade',
- action='store_true',
- help='Upgrade all packages to the newest available version')
- self.parser.add_option(
- '-I', '--ignore-installed',
- dest='ignore_installed',
- action='store_true',
- help='Ignore the installed packages (reinstalling instead)')
- self.parser.add_option(
- '--no-deps', '--no-dependencies',
- dest='ignore_dependencies',
- action='store_true',
- default=False,
- help='Ignore package dependencies')
- self.parser.add_option(
- '--no-install',
- dest='no_install',
- action='store_true',
- help="Download and unpack all packages, but don't actually install them")
- self.parser.add_option(
- '--no-download',
- dest='no_download',
- action="store_true",
- help="Don't download any packages, just install the ones already downloaded "
- "(completes an install run with --no-install)")
-
- self.parser.add_option(
- '--install-option',
- dest='install_options',
- action='append',
- help="Extra arguments to be supplied to the setup.py install "
- "command (use like --install-option=\"--install-scripts=/usr/local/bin\"). "
- "Use multiple --install-option options to pass multiple options to setup.py install. "
- "If you are using an option with a directory path, be sure to use absolute path.")
-
- self.parser.add_option(
- '--global-option',
- dest='global_options',
- action='append',
- help="Extra global options to be supplied to the setup.py"
- "call before the install command")
-
- self.parser.add_option(
- '--user',
- dest='use_user_site',
- action='store_true',
- help='Install to user-site')
-
- def _build_package_finder(self, options, index_urls):
- """
- Create a package finder appropriate to this install command.
- This method is meant to be overridden by subclasses, not
- called directly.
- """
- return PackageFinder(find_links=options.find_links,
- index_urls=index_urls,
- use_mirrors=options.use_mirrors,
- mirrors=options.mirrors)
-
- def run(self, options, args):
- if not options.build_dir:
- options.build_dir = build_prefix
- if not options.src_dir:
- options.src_dir = src_prefix
- if options.download_dir:
- options.no_install = True
- options.ignore_installed = True
- options.build_dir = os.path.abspath(options.build_dir)
- options.src_dir = os.path.abspath(options.src_dir)
- install_options = options.install_options or []
- if options.use_user_site:
- install_options.append('--user')
- global_options = options.global_options or []
- index_urls = [options.index_url] + options.extra_index_urls
- if options.no_index:
- logger.notify('Ignoring indexes: %s' % ','.join(index_urls))
- index_urls = []
-
- finder = self._build_package_finder(options, index_urls)
-
- requirement_set = RequirementSet(
- build_dir=options.build_dir,
- src_dir=options.src_dir,
- download_dir=options.download_dir,
- download_cache=options.download_cache,
- upgrade=options.upgrade,
- ignore_installed=options.ignore_installed,
- ignore_dependencies=options.ignore_dependencies)
- for name in args:
- requirement_set.add_requirement(
- InstallRequirement.from_line(name, None))
- for name in options.editables:
- requirement_set.add_requirement(
- InstallRequirement.from_editable(name, default_vcs=options.default_vcs))
- for filename in options.requirements:
- for req in parse_requirements(filename, finder=finder, options=options):
- requirement_set.add_requirement(req)
-
- if not requirement_set.has_requirements:
- if options.find_links:
- raise InstallationError('You must give at least one '
- 'requirement to %s (maybe you meant "pip install %s"?)'
- % (self.name, " ".join(options.find_links)))
- raise InstallationError('You must give at least one requirement '
- 'to %(name)s (see "pip help %(name)s")' % dict(name=self.name))
-
- if (options.use_user_site and
- sys.version_info < (2, 6)):
- raise InstallationError('--user is only supported in Python version 2.6 and newer')
-
- import setuptools
- if (options.use_user_site and
- requirement_set.has_editables and
- not getattr(setuptools, '_distribute', False)):
-
- raise InstallationError('--user --editable not supported with setuptools, use distribute')
-
- if not options.no_download:
- requirement_set.prepare_files(finder, force_root_egg_info=self.bundle, bundle=self.bundle)
- else:
- requirement_set.locate_files()
-
- if not options.no_install and not self.bundle:
- requirement_set.install(install_options, global_options)
- installed = ' '.join([req.name for req in
- requirement_set.successfully_installed])
- if installed:
- logger.notify('Successfully installed %s' % installed)
- elif not self.bundle:
- downloaded = ' '.join([req.name for req in
- requirement_set.successfully_downloaded])
- if downloaded:
- logger.notify('Successfully downloaded %s' % downloaded)
- elif self.bundle:
- requirement_set.create_bundle(self.bundle_filename)
- logger.notify('Created bundle in %s' % self.bundle_filename)
- # Clean up
- if not options.no_install:
- requirement_set.cleanup_files(bundle=self.bundle)
- return requirement_set
-
-
-InstallCommand()
diff --git a/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/commands/search.py b/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/commands/search.py
deleted file mode 100755
index 73da58ac..00000000
--- a/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/commands/search.py
+++ /dev/null
@@ -1,116 +0,0 @@
-import sys
-import xmlrpclib
-import textwrap
-import pkg_resources
-import pip.download
-from pip.basecommand import Command
-from pip.util import get_terminal_size
-from pip.log import logger
-from distutils.version import StrictVersion, LooseVersion
-
-
-class SearchCommand(Command):
- name = 'search'
- usage = '%prog QUERY'
- summary = 'Search PyPI'
-
- def __init__(self):
- super(SearchCommand, self).__init__()
- self.parser.add_option(
- '--index',
- dest='index',
- metavar='URL',
- default='http://pypi.python.org/pypi',
- help='Base URL of Python Package Index (default %default)')
-
- def run(self, options, args):
- if not args:
- logger.warn('ERROR: Missing required argument (search query).')
- return
- query = ' '.join(args)
- index_url = options.index
-
- pypi_hits = self.search(query, index_url)
- hits = transform_hits(pypi_hits)
-
- terminal_width = None
- if sys.stdout.isatty():
- terminal_width = get_terminal_size()[0]
-
- print_results(hits, terminal_width=terminal_width)
-
- def search(self, query, index_url):
- pypi = xmlrpclib.ServerProxy(index_url, pip.download.xmlrpclib_transport)
- hits = pypi.search({'name': query, 'summary': query}, 'or')
- return hits
-
-
-def transform_hits(hits):
- """
- The list from pypi is really a list of versions. We want a list of
- packages with the list of versions stored inline. This converts the
- list from pypi into one we can use.
- """
- packages = {}
- for hit in hits:
- name = hit['name']
- summary = hit['summary']
- version = hit['version']
- score = hit['_pypi_ordering']
-
- if name not in packages.keys():
- packages[name] = {'name': name, 'summary': summary, 'versions': [version], 'score': score}
- else:
- packages[name]['versions'].append(version)
-
- # if this is the highest version, replace summary and score
- if version == highest_version(packages[name]['versions']):
- packages[name]['summary'] = summary
- packages[name]['score'] = score
-
- # each record has a unique name now, so we will convert the dict into a list sorted by score
- package_list = sorted(packages.values(), lambda x, y: cmp(y['score'], x['score']))
- return package_list
-
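-# Illustrative sketch (not in the original source); the hit values below are
-# hypothetical. PyPI returns one record per version; transform_hits folds
-# them into one record per package, keeping the highest version's summary:
-#
-#   hits = [{'name': 'foo', 'summary': 'old', 'version': '1.0', '_pypi_ordering': 1},
-#           {'name': 'foo', 'summary': 'new', 'version': '1.1', '_pypi_ordering': 2}]
-#   transform_hits(hits)
-#   # -> [{'name': 'foo', 'summary': 'new', 'versions': ['1.0', '1.1'], 'score': 2}]
-#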
-
-def print_results(hits, name_column_width=25, terminal_width=None):
- installed_packages = [p.project_name for p in pkg_resources.working_set]
- for hit in hits:
- name = hit['name']
- summary = hit['summary'] or ''
- if terminal_width is not None:
- # wrap and indent summary to fit terminal
- summary = textwrap.wrap(summary, terminal_width - name_column_width - 5)
- summary = ('\n' + ' ' * (name_column_width + 3)).join(summary)
- line = '%s - %s' % (name.ljust(name_column_width), summary)
- try:
- logger.notify(line)
- if name in installed_packages:
- dist = pkg_resources.get_distribution(name)
- logger.indent += 2
- try:
- latest = highest_version(hit['versions'])
- if dist.version == latest:
- logger.notify('INSTALLED: %s (latest)' % dist.version)
- else:
- logger.notify('INSTALLED: %s' % dist.version)
- logger.notify('LATEST: %s' % latest)
- finally:
- logger.indent -= 2
- except UnicodeEncodeError:
- pass
-
-
-def compare_versions(version1, version2):
- try:
- return cmp(StrictVersion(version1), StrictVersion(version2))
- # in case of abnormal version number, fall back to LooseVersion
- except ValueError:
- return cmp(LooseVersion(version1), LooseVersion(version2))
-
-
-def highest_version(versions):
- return reduce((lambda v1, v2: compare_versions(v1, v2) == 1 and v1 or v2), versions)
-
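-# Hedged examples (not in the original file): StrictVersion compares
-# numerically, so '1.10' sorts above '1.2'; malformed versions fall back to
-# LooseVersion.
-#
-#   highest_version(['1.0', '1.10', '1.2'])   # -> '1.10'
-#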
-
-SearchCommand()
diff --git a/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/commands/uninstall.py b/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/commands/uninstall.py
deleted file mode 100755
index 7effd844..00000000
--- a/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/commands/uninstall.py
+++ /dev/null
@@ -1,42 +0,0 @@
-from pip.req import InstallRequirement, RequirementSet, parse_requirements
-from pip.basecommand import Command
-from pip.exceptions import InstallationError
-
-class UninstallCommand(Command):
- name = 'uninstall'
- usage = '%prog [OPTIONS] PACKAGE_NAMES ...'
- summary = 'Uninstall packages'
-
- def __init__(self):
- super(UninstallCommand, self).__init__()
- self.parser.add_option(
- '-r', '--requirement',
- dest='requirements',
- action='append',
- default=[],
- metavar='FILENAME',
- help='Uninstall all the packages listed in the given requirements file. '
- 'This option can be used multiple times.')
- self.parser.add_option(
- '-y', '--yes',
- dest='yes',
- action='store_true',
- help="Don't ask for confirmation of uninstall deletions.")
-
- def run(self, options, args):
- requirement_set = RequirementSet(
- build_dir=None,
- src_dir=None,
- download_dir=None)
- for name in args:
- requirement_set.add_requirement(
- InstallRequirement.from_line(name))
- for filename in options.requirements:
- for req in parse_requirements(filename, options=options):
- requirement_set.add_requirement(req)
- if not requirement_set.has_requirements:
- raise InstallationError('You must give at least one requirement '
- 'to %(name)s (see "pip help %(name)s")' % dict(name=self.name))
- requirement_set.uninstall(auto_confirm=options.yes)
-
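-# Illustrative usage (not part of the original source), based on the options
-# defined above:
-#
-#     $ pip uninstall -y simplejson
-#     $ pip uninstall -r requirements.txt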
-UninstallCommand()
diff --git a/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/commands/unzip.py b/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/commands/unzip.py
deleted file mode 100755
index f83e1820..00000000
--- a/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/commands/unzip.py
+++ /dev/null
@@ -1,9 +0,0 @@
-from pip.commands.zip import ZipCommand
-
-
-class UnzipCommand(ZipCommand):
- name = 'unzip'
- summary = 'Unzip individual packages'
-
-
-UnzipCommand()
diff --git a/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/commands/zip.py b/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/commands/zip.py
deleted file mode 100755
index 346fc051..00000000
--- a/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/commands/zip.py
+++ /dev/null
@@ -1,346 +0,0 @@
-import sys
-import re
-import fnmatch
-import os
-import shutil
-import zipfile
-from pip.util import display_path, backup_dir
-from pip.log import logger
-from pip.exceptions import InstallationError
-from pip.basecommand import Command
-
-
-class ZipCommand(Command):
- name = 'zip'
- usage = '%prog [OPTIONS] PACKAGE_NAMES...'
- summary = 'Zip individual packages'
-
- def __init__(self):
- super(ZipCommand, self).__init__()
- if self.name == 'zip':
- self.parser.add_option(
- '--unzip',
- action='store_true',
- dest='unzip',
- help='Unzip (rather than zip) a package')
- else:
- self.parser.add_option(
- '--zip',
- action='store_false',
- dest='unzip',
- default=True,
- help='Zip (rather than unzip) a package')
- self.parser.add_option(
- '--no-pyc',
- action='store_true',
- dest='no_pyc',
- help='Do not include .pyc files in zip files (useful on Google App Engine)')
- self.parser.add_option(
- '-l', '--list',
- action='store_true',
- dest='list',
- help='List the packages available, and their zip status')
- self.parser.add_option(
- '--sort-files',
- action='store_true',
- dest='sort_files',
- help='With --list, sort packages according to how many files they contain')
- self.parser.add_option(
- '--path',
- action='append',
- dest='paths',
- help='Restrict operations to the given paths (may include wildcards)')
- self.parser.add_option(
- '-n', '--simulate',
- action='store_true',
- help='Do not actually perform the zip/unzip operation')
-
- def paths(self):
- """All the entries of sys.path, possibly restricted by --path"""
- if not self.select_paths:
- return sys.path
- result = []
- match_any = set()
- for path in sys.path:
- path = os.path.normcase(os.path.abspath(path))
- for match in self.select_paths:
- match = os.path.normcase(os.path.abspath(match))
- if '*' in match:
- if re.search(fnmatch.translate(match+'*'), path):
- result.append(path)
- match_any.add(match)
- break
- else:
- if path.startswith(match):
- result.append(path)
- match_any.add(match)
- break
- else:
- logger.debug("Skipping path %s because it doesn't match %s"
- % (path, ', '.join(self.select_paths)))
- for match in self.select_paths:
- if match not in match_any and '*' not in match:
- result.append(match)
- logger.debug("Adding path %s because it doesn't match anything already on sys.path"
- % match)
- return result
-
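-    # Illustrative example (not part of the original source) of the wildcard
-    # branch in paths() above -- fnmatch.translate turns a --path glob into a
-    # regex that is matched against each normalized sys.path entry:
-    #
-    #     >>> import re, fnmatch
-    #     >>> bool(re.search(fnmatch.translate('/opt/py/*' + '*'), '/opt/py/lib'))
-    #     True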
- def run(self, options, args):
- self.select_paths = options.paths
- self.simulate = options.simulate
- if options.list:
- return self.list(options, args)
- if not args:
- raise InstallationError(
- 'You must give at least one package to zip or unzip')
- packages = []
- for arg in args:
- module_name, filename = self.find_package(arg)
- if options.unzip and os.path.isdir(filename):
- raise InstallationError(
- 'The module %s (in %s) is not a zip file; cannot be unzipped'
- % (module_name, filename))
- elif not options.unzip and not os.path.isdir(filename):
- raise InstallationError(
- 'The module %s (in %s) is not a directory; cannot be zipped'
- % (module_name, filename))
- packages.append((module_name, filename))
- last_status = None
- for module_name, filename in packages:
- if options.unzip:
- last_status = self.unzip_package(module_name, filename)
- else:
- last_status = self.zip_package(module_name, filename, options.no_pyc)
- return last_status
-
- def unzip_package(self, module_name, filename):
- zip_filename = os.path.dirname(filename)
-        if not (os.path.isfile(zip_filename) and zipfile.is_zipfile(zip_filename)):
- raise InstallationError(
- 'Module %s (in %s) isn\'t located in a zip file in %s'
- % (module_name, filename, zip_filename))
- package_path = os.path.dirname(zip_filename)
-        if package_path not in self.paths():
- logger.warn(
- 'Unpacking %s into %s, but %s is not on sys.path'
- % (display_path(zip_filename), display_path(package_path),
- display_path(package_path)))
- logger.notify('Unzipping %s (in %s)' % (module_name, display_path(zip_filename)))
- if self.simulate:
- logger.notify('Skipping remaining operations because of --simulate')
- return
- logger.indent += 2
- try:
- ## FIXME: this should be undoable:
- zip = zipfile.ZipFile(zip_filename)
- to_save = []
- for name in zip.namelist():
- if name.startswith(module_name + os.path.sep):
- content = zip.read(name)
- dest = os.path.join(package_path, name)
- if not os.path.exists(os.path.dirname(dest)):
- os.makedirs(os.path.dirname(dest))
- if not content and dest.endswith(os.path.sep):
- if not os.path.exists(dest):
- os.makedirs(dest)
- else:
- f = open(dest, 'wb')
- f.write(content)
- f.close()
- else:
- to_save.append((name, zip.read(name)))
- zip.close()
- if not to_save:
- logger.info('Removing now-empty zip file %s' % display_path(zip_filename))
- os.unlink(zip_filename)
- self.remove_filename_from_pth(zip_filename)
- else:
- logger.info('Removing entries in %s/ from zip file %s' % (module_name, display_path(zip_filename)))
- zip = zipfile.ZipFile(zip_filename, 'w')
- for name, content in to_save:
- zip.writestr(name, content)
- zip.close()
- finally:
- logger.indent -= 2
-
- def zip_package(self, module_name, filename, no_pyc):
- orig_filename = filename
- logger.notify('Zip %s (in %s)' % (module_name, display_path(filename)))
- logger.indent += 2
- if filename.endswith('.egg'):
- dest_filename = filename
- else:
- dest_filename = filename + '.zip'
- try:
- ## FIXME: I think this needs to be undoable:
- if filename == dest_filename:
- filename = backup_dir(orig_filename)
- logger.notify('Moving %s aside to %s' % (orig_filename, filename))
- if not self.simulate:
- shutil.move(orig_filename, filename)
- try:
- logger.info('Creating zip file in %s' % display_path(dest_filename))
- if not self.simulate:
- zip = zipfile.ZipFile(dest_filename, 'w')
- zip.writestr(module_name + '/', '')
- for dirpath, dirnames, filenames in os.walk(filename):
- if no_pyc:
- filenames = [f for f in filenames
- if not f.lower().endswith('.pyc')]
- for fns, is_dir in [(dirnames, True), (filenames, False)]:
- for fn in fns:
- full = os.path.join(dirpath, fn)
- dest = os.path.join(module_name, dirpath[len(filename):].lstrip(os.path.sep), fn)
- if is_dir:
- zip.writestr(dest+'/', '')
- else:
- zip.write(full, dest)
- zip.close()
- logger.info('Removing old directory %s' % display_path(filename))
- if not self.simulate:
- shutil.rmtree(filename)
- except:
- ## FIXME: need to do an undo here
- raise
- ## FIXME: should also be undone:
- self.add_filename_to_pth(dest_filename)
- finally:
- logger.indent -= 2
-
- def remove_filename_from_pth(self, filename):
- for pth in self.pth_files():
- f = open(pth, 'r')
- lines = f.readlines()
- f.close()
- new_lines = [
- l for l in lines if l.strip() != filename]
- if lines != new_lines:
- logger.info('Removing reference to %s from .pth file %s'
- % (display_path(filename), display_path(pth)))
- if not filter(None, new_lines):
- logger.info('%s file would be empty: deleting' % display_path(pth))
- if not self.simulate:
- os.unlink(pth)
- else:
- if not self.simulate:
- f = open(pth, 'wb')
- f.writelines(new_lines)
- f.close()
- return
- logger.warn('Cannot find a reference to %s in any .pth file' % display_path(filename))
-
- def add_filename_to_pth(self, filename):
- path = os.path.dirname(filename)
- dest = os.path.join(path, filename + '.pth')
- if path not in self.paths():
- logger.warn('Adding .pth file %s, but it is not on sys.path' % display_path(dest))
- if not self.simulate:
- if os.path.exists(dest):
- f = open(dest)
- lines = f.readlines()
- f.close()
- if lines and not lines[-1].endswith('\n'):
- lines[-1] += '\n'
- lines.append(filename+'\n')
- else:
- lines = [filename + '\n']
- f = open(dest, 'wb')
- f.writelines(lines)
- f.close()
-
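-    # Illustrative note (not part of the original source): a .pth file lists
-    # one path per line, and site.py appends each line to sys.path; after
-    # zipping, a hypothetical site-packages/foo.egg.pth would contain a
-    # single line such as:
-    #
-    #     /usr/lib/python2.7/site-packages/foo.egg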
- def pth_files(self):
- for path in self.paths():
- if not os.path.exists(path) or not os.path.isdir(path):
- continue
- for filename in os.listdir(path):
- if filename.endswith('.pth'):
- yield os.path.join(path, filename)
-
- def find_package(self, package):
- for path in self.paths():
- full = os.path.join(path, package)
- if os.path.exists(full):
- return package, full
- if not os.path.isdir(path) and zipfile.is_zipfile(path):
- zip = zipfile.ZipFile(path, 'r')
- try:
-                    zip.read(package + '/__init__.py')
- except KeyError:
- pass
- else:
- zip.close()
- return package, full
- zip.close()
- ## FIXME: need special error for package.py case:
- raise InstallationError(
- 'No package with the name %s found' % package)
-
- def list(self, options, args):
- if args:
- raise InstallationError(
- 'You cannot give an argument with --list')
- for path in sorted(self.paths()):
- if not os.path.exists(path):
- continue
- basename = os.path.basename(path.rstrip(os.path.sep))
- if os.path.isfile(path) and zipfile.is_zipfile(path):
- if os.path.dirname(path) not in self.paths():
- logger.notify('Zipped egg: %s' % display_path(path))
- continue
- if (basename != 'site-packages' and basename != 'dist-packages'
- and not path.replace('\\', '/').endswith('lib/python')):
- continue
- logger.notify('In %s:' % display_path(path))
- logger.indent += 2
- zipped = []
- unzipped = []
- try:
- for filename in sorted(os.listdir(path)):
- ext = os.path.splitext(filename)[1].lower()
- if ext in ('.pth', '.egg-info', '.egg-link'):
- continue
- if ext == '.py':
- logger.info('Not displaying %s: not a package' % display_path(filename))
- continue
- full = os.path.join(path, filename)
- if os.path.isdir(full):
- unzipped.append((filename, self.count_package(full)))
- elif zipfile.is_zipfile(full):
- zipped.append(filename)
- else:
- logger.info('Unknown file: %s' % display_path(filename))
- if zipped:
- logger.notify('Zipped packages:')
- logger.indent += 2
- try:
- for filename in zipped:
- logger.notify(filename)
- finally:
- logger.indent -= 2
- else:
- logger.notify('No zipped packages.')
- if unzipped:
- if options.sort_files:
- unzipped.sort(key=lambda x: -x[1])
- logger.notify('Unzipped packages:')
- logger.indent += 2
- try:
- for filename, count in unzipped:
- logger.notify('%s (%i files)' % (filename, count))
- finally:
- logger.indent -= 2
- else:
- logger.notify('No unzipped packages.')
- finally:
- logger.indent -= 2
-
- def count_package(self, path):
- total = 0
- for dirpath, dirnames, filenames in os.walk(path):
- filenames = [f for f in filenames
- if not f.lower().endswith('.pyc')]
- total += len(filenames)
- return total
-
-
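-# Illustrative usage (not part of the original source), based on the options
-# defined above:
-#
-#     $ pip zip --list
-#     $ pip zip --no-pyc simplejson
-#     $ pip unzip simplejson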
-ZipCommand()
diff --git a/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/download.py b/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/download.py
deleted file mode 100755
index f1b63936..00000000
--- a/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/download.py
+++ /dev/null
@@ -1,470 +0,0 @@
-import xmlrpclib
-import re
-import getpass
-import urllib
-import urllib2
-import urlparse
-import os
-import mimetypes
-import shutil
-import tempfile
-from pip.backwardcompat import md5, copytree
-from pip.exceptions import InstallationError
-from pip.util import (splitext,
- format_size, display_path, backup_dir, ask,
- unpack_file, create_download_cache_folder, cache_download)
-from pip.vcs import vcs
-from pip.log import logger
-
-
-__all__ = ['xmlrpclib_transport', 'get_file_content', 'urlopen',
- 'is_url', 'url_to_path', 'path_to_url', 'path_to_url2',
- 'geturl', 'is_archive_file', 'unpack_vcs_link',
- 'unpack_file_url', 'is_vcs_url', 'is_file_url', 'unpack_http_url']
-
-
-xmlrpclib_transport = xmlrpclib.Transport()
-
-
-def get_file_content(url, comes_from=None):
- """Gets the content of a file; it may be a filename, file: URL, or
- http: URL. Returns (location, content)"""
- match = _scheme_re.search(url)
- if match:
- scheme = match.group(1).lower()
- if (scheme == 'file' and comes_from
- and comes_from.startswith('http')):
- raise InstallationError(
- 'Requirements file %s references URL %s, which is local'
- % (comes_from, url))
- if scheme == 'file':
- path = url.split(':', 1)[1]
- path = path.replace('\\', '/')
- match = _url_slash_drive_re.match(path)
- if match:
- path = match.group(1) + ':' + path.split('|', 1)[1]
- path = urllib.unquote(path)
- if path.startswith('/'):
- path = '/' + path.lstrip('/')
- url = path
- else:
- ## FIXME: catch some errors
- resp = urlopen(url)
- return geturl(resp), resp.read()
- try:
- f = open(url)
- content = f.read()
- except IOError, e:
- raise InstallationError('Could not open requirements file: %s' % str(e))
- else:
- f.close()
- return url, content
-
-
-_scheme_re = re.compile(r'^(http|https|file):', re.I)
-_url_slash_drive_re = re.compile(r'/*([a-z])\|', re.I)
-
-class URLOpener(object):
- """
- pip's own URL helper that adds HTTP auth and proxy support
- """
- def __init__(self):
- self.passman = urllib2.HTTPPasswordMgrWithDefaultRealm()
-
- def __call__(self, url):
- """
- If the given url contains auth info or if a normal request gets a 401
- response, an attempt is made to fetch the resource using basic HTTP
- auth.
-
- """
- url, username, password = self.extract_credentials(url)
- if username is None:
- try:
- response = urllib2.urlopen(self.get_request(url))
- except urllib2.HTTPError, e:
- if e.code != 401:
- raise
- response = self.get_response(url)
- else:
- response = self.get_response(url, username, password)
- return response
-
- def get_request(self, url):
- """
-        Wraps the URL to retrieve, to protect against "creative"
-        interpretation of the RFC: http://bugs.python.org/issue8732
- """
- if isinstance(url, basestring):
- url = urllib2.Request(url, headers={'Accept-encoding': 'identity'})
- return url
-
- def get_response(self, url, username=None, password=None):
- """
-        Does the dirty work of actually getting the response object using
-        urllib2 and its HTTP auth builtins.
- """
- scheme, netloc, path, query, frag = urlparse.urlsplit(url)
- pass_url = urlparse.urlunsplit(('_none_', netloc, path, query, frag)).replace('_none_://', '', 1)
- req = self.get_request(url)
-
- stored_username, stored_password = self.passman.find_user_password(None, netloc)
- # see if we have a password stored
- if stored_username is None:
- if username is None and self.prompting:
- username = urllib.quote(raw_input('User for %s: ' % netloc))
- password = urllib.quote(getpass.getpass('Password: '))
- if username and password:
- self.passman.add_password(None, netloc, username, password)
- stored_username, stored_password = self.passman.find_user_password(None, netloc)
- authhandler = urllib2.HTTPBasicAuthHandler(self.passman)
- opener = urllib2.build_opener(authhandler)
- # FIXME: should catch a 401 and offer to let the user reenter credentials
- return opener.open(req)
-
- def setup(self, proxystr='', prompting=True):
- """
- Sets the proxy handler given the option passed on the command
- line. If an empty string is passed it looks at the HTTP_PROXY
- environment variable.
- """
- self.prompting = prompting
- proxy = self.get_proxy(proxystr)
- if proxy:
- proxy_support = urllib2.ProxyHandler({"http": proxy, "ftp": proxy})
- opener = urllib2.build_opener(proxy_support, urllib2.CacheFTPHandler)
- urllib2.install_opener(opener)
-
- def parse_credentials(self, netloc):
- if "@" in netloc:
- userinfo = netloc.rsplit("@", 1)[0]
- if ":" in userinfo:
- return userinfo.split(":", 1)
- return userinfo, None
- return None, None
-
- def extract_credentials(self, url):
- """
- Extracts user/password from a url.
-
- Returns a tuple:
- (url-without-auth, username, password)
- """
- if isinstance(url, urllib2.Request):
- result = urlparse.urlsplit(url.get_full_url())
- else:
- result = urlparse.urlsplit(url)
- scheme, netloc, path, query, frag = result
-
- username, password = self.parse_credentials(netloc)
- if username is None:
- return url, None, None
- elif password is None and self.prompting:
- # remove the auth credentials from the url part
- netloc = netloc.replace('%s@' % username, '', 1)
- # prompt for the password
- prompt = 'Password for %s@%s: ' % (username, netloc)
- password = urllib.quote(getpass.getpass(prompt))
- else:
- # remove the auth credentials from the url part
- netloc = netloc.replace('%s:%s@' % (username, password), '', 1)
-
- target_url = urlparse.urlunsplit((scheme, netloc, path, query, frag))
- return target_url, username, password
-
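-    # Illustrative example (not part of the original source):
-    #
-    #     >>> URLOpener().extract_credentials('http://user:secret@example.com/simple/')
-    #     ('http://example.com/simple/', 'user', 'secret')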
- def get_proxy(self, proxystr=''):
- """
- Get the proxy given the option passed on the command line.
- If an empty string is passed it looks at the HTTP_PROXY
- environment variable.
- """
- if not proxystr:
- proxystr = os.environ.get('HTTP_PROXY', '')
- if proxystr:
- if '@' in proxystr:
- user_password, server_port = proxystr.split('@', 1)
- if ':' in user_password:
- user, password = user_password.split(':', 1)
- else:
- user = user_password
- prompt = 'Password for %s@%s: ' % (user, server_port)
- password = urllib.quote(getpass.getpass(prompt))
- return '%s:%s@%s' % (user, password, server_port)
- else:
- return proxystr
- else:
- return None
-
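-# Illustrative example (not part of the original source): a complete
-# user:password@host proxy string is passed through unchanged, while a
-# user-only string triggers a getpass prompt:
-#
-#     >>> URLOpener().get_proxy('user:secret@proxy.local:3128')
-#     'user:secret@proxy.local:3128'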
-urlopen = URLOpener()
-
-
-def is_url(name):
- """Returns true if the name looks like a URL"""
- if ':' not in name:
- return False
- scheme = name.split(':', 1)[0].lower()
- return scheme in ['http', 'https', 'file', 'ftp'] + vcs.all_schemes
-
-
-def url_to_path(url):
- """
- Convert a file: URL to a path.
- """
- assert url.startswith('file:'), (
- "You can only turn file: urls into filenames (not %r)" % url)
- path = url[len('file:'):].lstrip('/')
- path = urllib.unquote(path)
- if _url_drive_re.match(path):
- path = path[0] + ':' + path[2:]
- else:
- path = '/' + path
- return path
-
-
-_drive_re = re.compile('^([a-z]):', re.I)
-_url_drive_re = re.compile('^([a-z])[:|]', re.I)
-
-
-def path_to_url(path):
- """
- Convert a path to a file: URL. The path will be made absolute.
- """
- path = os.path.normcase(os.path.abspath(path))
- if _drive_re.match(path):
- path = path[0] + '|' + path[2:]
- url = urllib.quote(path)
- url = url.replace(os.path.sep, '/')
- url = url.lstrip('/')
- return 'file:///' + url
-
-
-def path_to_url2(path):
- """
- Convert a path to a file: URL. The path will be made absolute and have
- quoted path parts.
- """
- path = os.path.normpath(os.path.abspath(path))
- drive, path = os.path.splitdrive(path)
- filepath = path.split(os.path.sep)
- url = '/'.join([urllib.quote(part) for part in filepath])
- if not drive:
- url = url.lstrip('/')
- return 'file:///' + drive + url
-
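-# Illustrative round trip (not part of the original source), POSIX paths
-# assumed:
-#
-#     >>> path_to_url2('/tmp/pip test')
-#     'file:///tmp/pip%20test'
-#     >>> url_to_path('file:///tmp/pip%20test')
-#     '/tmp/pip test'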
-
-def geturl(urllib2_resp):
- """
- Use instead of urllib.addinfourl.geturl(), which appears to have
- some issues with dropping the double slash for certain schemes
- (e.g. file://). This implementation is probably over-eager, as it
- always restores '://' if it is missing, and it appears some url
- schemata aren't always followed by '//' after the colon, but as
- far as I know pip doesn't need any of those.
- The URI RFC can be found at: http://tools.ietf.org/html/rfc1630
-
- This function assumes that
- scheme:/foo/bar
- is the same as
- scheme:///foo/bar
- """
- url = urllib2_resp.geturl()
- scheme, rest = url.split(':', 1)
- if rest.startswith('//'):
- return url
- else:
- # FIXME: write a good test to cover it
- return '%s://%s' % (scheme, rest)
-
-
-def is_archive_file(name):
-    """Return True if `name` is considered an archive file."""
- archives = ('.zip', '.tar.gz', '.tar.bz2', '.tgz', '.tar', '.pybundle')
- ext = splitext(name)[1].lower()
- if ext in archives:
- return True
- return False
-
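-# Illustrative example (not part of the original source); pip's splitext
-# treats '.tar.gz' as a single extension:
-#
-#     >>> is_archive_file('pip-0.8.1.tar.gz')
-#     True
-#     >>> is_archive_file('pip-0.8.1')
-#     False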
-
-def unpack_vcs_link(link, location, only_download=False):
- vcs_backend = _get_used_vcs_backend(link)
- if only_download:
- vcs_backend.export(location)
- else:
- vcs_backend.unpack(location)
-
-
-def unpack_file_url(link, location):
- source = url_to_path(link.url)
- content_type = mimetypes.guess_type(source)[0]
- if os.path.isdir(source):
- # delete the location since shutil will create it again :(
- if os.path.isdir(location):
- shutil.rmtree(location)
- copytree(source, location)
- else:
- unpack_file(source, location, content_type, link)
-
-
-def _get_used_vcs_backend(link):
- for backend in vcs.backends:
- if link.scheme in backend.schemes:
- vcs_backend = backend(link.url)
- return vcs_backend
-
-
-def is_vcs_url(link):
- return bool(_get_used_vcs_backend(link))
-
-
-def is_file_url(link):
- return link.url.lower().startswith('file:')
-
-
-def _check_md5(download_hash, link):
- download_hash = download_hash.hexdigest()
- if download_hash != link.md5_hash:
- logger.fatal("MD5 hash of the package %s (%s) doesn't match the expected hash %s!"
- % (link, download_hash, link.md5_hash))
- raise InstallationError('Bad MD5 hash for package %s' % link)
-
-
-def _get_md5_from_file(target_file, link):
- download_hash = md5()
- fp = open(target_file, 'rb')
- while 1:
- chunk = fp.read(4096)
- if not chunk:
- break
- download_hash.update(chunk)
- fp.close()
- return download_hash
-
-
-def _download_url(resp, link, temp_location):
- fp = open(temp_location, 'wb')
- download_hash = None
- if link.md5_hash:
- download_hash = md5()
- try:
- total_length = int(resp.info()['content-length'])
- except (ValueError, KeyError):
- total_length = 0
- downloaded = 0
- show_progress = total_length > 40*1000 or not total_length
- show_url = link.show_url
- try:
- if show_progress:
- ## FIXME: the URL can get really long in this message:
- if total_length:
- logger.start_progress('Downloading %s (%s): ' % (show_url, format_size(total_length)))
- else:
- logger.start_progress('Downloading %s (unknown size): ' % show_url)
- else:
- logger.notify('Downloading %s' % show_url)
- logger.debug('Downloading from URL %s' % link)
-
- while 1:
- chunk = resp.read(4096)
- if not chunk:
- break
- downloaded += len(chunk)
- if show_progress:
- if not total_length:
- logger.show_progress('%s' % format_size(downloaded))
- else:
- logger.show_progress('%3i%% %s' % (100*downloaded/total_length, format_size(downloaded)))
- if link.md5_hash:
- download_hash.update(chunk)
- fp.write(chunk)
- fp.close()
- finally:
- if show_progress:
- logger.end_progress('%s downloaded' % format_size(downloaded))
- return download_hash
-
-
-def _copy_file(filename, location, content_type, link):
- copy = True
- download_location = os.path.join(location, link.filename)
- if os.path.exists(download_location):
- response = ask('The file %s exists. (i)gnore, (w)ipe, (b)ackup '
- % display_path(download_location), ('i', 'w', 'b'))
- if response == 'i':
- copy = False
- elif response == 'w':
- logger.warn('Deleting %s' % display_path(download_location))
- os.remove(download_location)
- elif response == 'b':
- dest_file = backup_dir(download_location)
- logger.warn('Backing up %s to %s'
- % (display_path(download_location), display_path(dest_file)))
- shutil.move(download_location, dest_file)
- if copy:
- shutil.copy(filename, download_location)
- logger.indent -= 2
- logger.notify('Saved %s' % display_path(download_location))
-
-
-def unpack_http_url(link, location, download_cache, only_download):
- temp_dir = tempfile.mkdtemp('-unpack', 'pip-')
- target_url = link.url.split('#', 1)[0]
- target_file = None
- download_hash = None
- if download_cache:
- target_file = os.path.join(download_cache,
- urllib.quote(target_url, ''))
- if not os.path.isdir(download_cache):
- create_download_cache_folder(download_cache)
- if (target_file
- and os.path.exists(target_file)
- and os.path.exists(target_file+'.content-type')):
- fp = open(target_file+'.content-type')
- content_type = fp.read().strip()
- fp.close()
- if link.md5_hash:
- download_hash = _get_md5_from_file(target_file, link)
- temp_location = target_file
- logger.notify('Using download cache from %s' % target_file)
- else:
- resp = _get_response_from_url(target_url, link)
- content_type = resp.info()['content-type']
- filename = link.filename
- ext = splitext(filename)[1]
- if not ext:
- ext = mimetypes.guess_extension(content_type)
- if ext:
- filename += ext
- if not ext and link.url != geturl(resp):
- ext = os.path.splitext(geturl(resp))[1]
- if ext:
- filename += ext
- temp_location = os.path.join(temp_dir, filename)
- download_hash = _download_url(resp, link, temp_location)
- if link.md5_hash:
- _check_md5(download_hash, link)
- if only_download:
- _copy_file(temp_location, location, content_type, link)
- else:
- unpack_file(temp_location, location, content_type, link)
- if target_file and target_file != temp_location:
- cache_download(target_file, temp_location, content_type)
- if target_file is None:
- os.unlink(temp_location)
- os.rmdir(temp_dir)
-
-
-def _get_response_from_url(target_url, link):
- try:
- resp = urlopen(target_url)
- except urllib2.HTTPError, e:
- logger.fatal("HTTP error %s while getting %s" % (e.code, link))
- raise
- except IOError, e:
- # Typically an FTP error
- logger.fatal("Error %s while getting %s" % (e, link))
- raise
- return resp
-
-class Urllib2HeadRequest(urllib2.Request):
- def get_method(self):
- return "HEAD"
diff --git a/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/exceptions.py b/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/exceptions.py
deleted file mode 100755
index 1ad1a616..00000000
--- a/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/exceptions.py
+++ /dev/null
@@ -1,17 +0,0 @@
-"""Exceptions used throughout package"""
-
-
-class InstallationError(Exception):
- """General exception during installation"""
-
-
-class UninstallationError(Exception):
- """General exception during uninstallation"""
-
-
-class DistributionNotFound(InstallationError):
- """Raised when a distribution cannot be found to satisfy a requirement"""
-
-
-class BadCommand(Exception):
- """Raised when virtualenv or a command is not found"""
diff --git a/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/index.py b/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/index.py
deleted file mode 100755
index e42d8c86..00000000
--- a/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/index.py
+++ /dev/null
@@ -1,686 +0,0 @@
-"""Routines related to PyPI, indexes"""
-
-import sys
-import os
-import re
-import mimetypes
-import threading
-import posixpath
-import pkg_resources
-import urllib
-import urllib2
-import urlparse
-import httplib
-import random
-import socket
-import string
-from Queue import Queue
-from Queue import Empty as QueueEmpty
-from pip.log import logger
-from pip.util import Inf
-from pip.util import normalize_name, splitext
-from pip.exceptions import DistributionNotFound
-from pip.backwardcompat import WindowsError, product
-from pip.download import urlopen, path_to_url2, url_to_path, geturl, Urllib2HeadRequest
-
-__all__ = ['PackageFinder']
-
-
-DEFAULT_MIRROR_URL = "last.pypi.python.org"
-
-
-class PackageFinder(object):
- """This finds packages.
-
- This is meant to match easy_install's technique for looking for
- packages, by reading pages and looking for appropriate links
- """
-
- def __init__(self, find_links, index_urls,
- use_mirrors=False, mirrors=None, main_mirror_url=None):
- self.find_links = find_links
- self.index_urls = index_urls
- self.dependency_links = []
- self.cache = PageCache()
- # These are boring links that have already been logged somehow:
- self.logged_links = set()
- if use_mirrors:
- self.mirror_urls = self._get_mirror_urls(mirrors, main_mirror_url)
- logger.info('Using PyPI mirrors: %s' % ', '.join(self.mirror_urls))
- else:
- self.mirror_urls = []
-
- def add_dependency_links(self, links):
- ## FIXME: this shouldn't be global list this, it should only
- ## apply to requirements of the package that specifies the
- ## dependency_links value
- ## FIXME: also, we should track comes_from (i.e., use Link)
- self.dependency_links.extend(links)
-
- @staticmethod
- def _sort_locations(locations):
- """
- Sort locations into "files" (archives) and "urls", and return
- a pair of lists (files,urls)
- """
- files = []
- urls = []
-
- # puts the url for the given file path into the appropriate
- # list
- def sort_path(path):
- url = path_to_url2(path)
- if mimetypes.guess_type(url, strict=False)[0] == 'text/html':
- urls.append(url)
- else:
- files.append(url)
-
- for url in locations:
- if url.startswith('file:'):
- path = url_to_path(url)
- if os.path.isdir(path):
- path = os.path.realpath(path)
- for item in os.listdir(path):
- sort_path(os.path.join(path, item))
- elif os.path.isfile(path):
- sort_path(path)
- else:
- urls.append(url)
- return files, urls
-
- def find_requirement(self, req, upgrade):
- url_name = req.url_name
- # Only check main index if index URL is given:
- main_index_url = None
- if self.index_urls:
- # Check that we have the url_name correctly spelled:
- main_index_url = Link(posixpath.join(self.index_urls[0], url_name))
- # This will also cache the page, so it's okay that we get it again later:
- page = self._get_page(main_index_url, req)
- if page is None:
- url_name = self._find_url_name(Link(self.index_urls[0]), url_name, req) or req.url_name
-
- # Combine index URLs with mirror URLs here to allow
- # adding more index URLs from requirements files
- all_index_urls = self.index_urls + self.mirror_urls
-
- def mkurl_pypi_url(url):
- loc = posixpath.join(url, url_name)
- # For maximum compatibility with easy_install, ensure the path
- # ends in a trailing slash. Although this isn't in the spec
- # (and PyPI can handle it without the slash) some other index
- # implementations might break if they relied on easy_install's behavior.
- if not loc.endswith('/'):
- loc = loc + '/'
- return loc
- if url_name is not None:
- locations = [
- mkurl_pypi_url(url)
- for url in all_index_urls] + self.find_links
- else:
- locations = list(self.find_links)
- locations.extend(self.dependency_links)
- for version in req.absolute_versions:
- if url_name is not None and main_index_url is not None:
- locations = [
- posixpath.join(main_index_url.url, version)] + locations
-
- file_locations, url_locations = self._sort_locations(locations)
-
- locations = [Link(url) for url in url_locations]
- logger.debug('URLs to search for versions for %s:' % req)
- for location in locations:
- logger.debug('* %s' % location)
- found_versions = []
- found_versions.extend(
- self._package_versions(
- [Link(url, '-f') for url in self.find_links], req.name.lower()))
- page_versions = []
- for page in self._get_pages(locations, req):
- logger.debug('Analyzing links from page %s' % page.url)
- logger.indent += 2
- try:
- page_versions.extend(self._package_versions(page.links, req.name.lower()))
- finally:
- logger.indent -= 2
- dependency_versions = list(self._package_versions(
- [Link(url) for url in self.dependency_links], req.name.lower()))
- if dependency_versions:
- logger.info('dependency_links found: %s' % ', '.join([link.url for parsed, link, version in dependency_versions]))
- file_versions = list(self._package_versions(
- [Link(url) for url in file_locations], req.name.lower()))
- if not found_versions and not page_versions and not dependency_versions and not file_versions:
- logger.fatal('Could not find any downloads that satisfy the requirement %s' % req)
- raise DistributionNotFound('No distributions at all found for %s' % req)
- if req.satisfied_by is not None:
- found_versions.append((req.satisfied_by.parsed_version, Inf, req.satisfied_by.version))
- if file_versions:
- file_versions.sort(reverse=True)
- logger.info('Local files found: %s' % ', '.join([url_to_path(link.url) for parsed, link, version in file_versions]))
- found_versions = file_versions + found_versions
- all_versions = found_versions + page_versions + dependency_versions
- applicable_versions = []
- for (parsed_version, link, version) in all_versions:
- if version not in req.req:
- logger.info("Ignoring link %s, version %s doesn't match %s"
- % (link, version, ','.join([''.join(s) for s in req.req.specs])))
- continue
- applicable_versions.append((link, version))
- applicable_versions = sorted(applicable_versions, key=lambda v: pkg_resources.parse_version(v[1]), reverse=True)
- existing_applicable = bool([link for link, version in applicable_versions if link is Inf])
- if not upgrade and existing_applicable:
-            if applicable_versions[0][0] is Inf:
- logger.info('Existing installed version (%s) is most up-to-date and satisfies requirement'
- % req.satisfied_by.version)
- else:
- logger.info('Existing installed version (%s) satisfies requirement (most up-to-date version is %s)'
- % (req.satisfied_by.version, applicable_versions[0][1]))
- return None
- if not applicable_versions:
- logger.fatal('Could not find a version that satisfies the requirement %s (from versions: %s)'
- % (req, ', '.join([version for parsed_version, link, version in found_versions])))
- raise DistributionNotFound('No distributions matching the version for %s' % req)
- if applicable_versions[0][0] is Inf:
-            # We have an existing version, and it's the best version
- logger.info('Installed version (%s) is most up-to-date (past versions: %s)'
- % (req.satisfied_by.version, ', '.join([version for link, version in applicable_versions[1:]]) or 'none'))
- return None
- if len(applicable_versions) > 1:
- logger.info('Using version %s (newest of versions: %s)' %
- (applicable_versions[0][1], ', '.join([version for link, version in applicable_versions])))
- return applicable_versions[0][0]
-
- def _find_url_name(self, index_url, url_name, req):
- """Finds the true URL name of a package, when the given name isn't quite correct.
- This is usually used to implement case-insensitivity."""
- if not index_url.url.endswith('/'):
- # Vaguely part of the PyPI API... weird but true.
- ## FIXME: bad to modify this?
- index_url.url += '/'
- page = self._get_page(index_url, req)
- if page is None:
- logger.fatal('Cannot fetch index base URL %s' % index_url)
- return
- norm_name = normalize_name(req.url_name)
- for link in page.links:
- base = posixpath.basename(link.path.rstrip('/'))
- if norm_name == normalize_name(base):
- logger.notify('Real name of requirement %s is %s' % (url_name, base))
- return base
- return None
-
- def _get_pages(self, locations, req):
- """Yields (page, page_url) from the given locations, skipping
- locations that have errors, and adding download/homepage links"""
- pending_queue = Queue()
- for location in locations:
- pending_queue.put(location)
- done = []
- seen = set()
- threads = []
- for i in range(min(10, len(locations))):
- t = threading.Thread(target=self._get_queued_page, args=(req, pending_queue, done, seen))
- t.setDaemon(True)
- threads.append(t)
- t.start()
- for t in threads:
- t.join()
- return done
-
- _log_lock = threading.Lock()
-
- def _get_queued_page(self, req, pending_queue, done, seen):
- while 1:
- try:
- location = pending_queue.get(False)
- except QueueEmpty:
- return
- if location in seen:
- continue
- seen.add(location)
- page = self._get_page(location, req)
- if page is None:
- continue
- done.append(page)
- for link in page.rel_links():
- pending_queue.put(link)
-
- _egg_fragment_re = re.compile(r'#egg=([^&]*)')
- _egg_info_re = re.compile(r'([a-z0-9_.]+)-([a-z0-9_.-]+)', re.I)
- _py_version_re = re.compile(r'-py([123]\.[0-9])$')
-
- def _sort_links(self, links):
- "Returns elements of links in order, non-egg links first, egg links second, while eliminating duplicates"
- eggs, no_eggs = [], []
- seen = set()
- for link in links:
- if link not in seen:
- seen.add(link)
- if link.egg_fragment:
- eggs.append(link)
- else:
- no_eggs.append(link)
- return no_eggs + eggs
-
- def _package_versions(self, links, search_name):
- for link in self._sort_links(links):
- for v in self._link_package_versions(link, search_name):
- yield v
-
- def _link_package_versions(self, link, search_name):
- """
- Return an iterable of triples (pkg_resources_version_key,
- link, python_version) that can be extracted from the given
- link.
-
- Meant to be overridden by subclasses, not called by clients.
- """
- if link.egg_fragment:
- egg_info = link.egg_fragment
- else:
- egg_info, ext = link.splitext()
- if not ext:
- if link not in self.logged_links:
- logger.debug('Skipping link %s; not a file' % link)
- self.logged_links.add(link)
- return []
- if egg_info.endswith('.tar'):
- # Special double-extension case:
- egg_info = egg_info[:-4]
- ext = '.tar' + ext
- if ext not in ('.tar.gz', '.tar.bz2', '.tar', '.tgz', '.zip'):
- if link not in self.logged_links:
- logger.debug('Skipping link %s; unknown archive format: %s' % (link, ext))
- self.logged_links.add(link)
- return []
- version = self._egg_info_matches(egg_info, search_name, link)
- if version is None:
- logger.debug('Skipping link %s; wrong project name (not %s)' % (link, search_name))
- return []
- match = self._py_version_re.search(version)
- if match:
- version = version[:match.start()]
- py_version = match.group(1)
- if py_version != sys.version[:3]:
- logger.debug('Skipping %s because Python version is incorrect' % link)
- return []
- logger.debug('Found link %s, version: %s' % (link, version))
- return [(pkg_resources.parse_version(version),
- link,
- version)]
-
- def _egg_info_matches(self, egg_info, search_name, link):
- match = self._egg_info_re.search(egg_info)
- if not match:
- logger.debug('Could not parse version from link: %s' % link)
- return None
- name = match.group(0).lower()
- # To match the "safe" name that pkg_resources creates:
- name = name.replace('_', '-')
- if name.startswith(search_name.lower()):
- return match.group(0)[len(search_name):].lstrip('-')
- else:
- return None
-
- def _get_page(self, link, req):
- return HTMLPage.get_page(link, req, cache=self.cache)
-
- def _get_mirror_urls(self, mirrors=None, main_mirror_url=None):
-        """Retrieves a list of URLs from the main mirror DNS entry,
-        unless a list of mirror URLs is passed.
- """
- if not mirrors:
- mirrors = get_mirrors(main_mirror_url)
- # Should this be made "less random"? E.g. netselect like?
- random.shuffle(mirrors)
-
- mirror_urls = set()
- for mirror_url in mirrors:
- # Make sure we have a valid URL
-            if not mirror_url.startswith(("http://", "https://", "file://")):
- mirror_url = "http://%s" % mirror_url
- if not mirror_url.endswith("/simple"):
- mirror_url = "%s/simple/" % mirror_url
- mirror_urls.add(mirror_url)
-
- return list(mirror_urls)
-
-
-class PageCache(object):
- """Cache of HTML pages"""
-
- failure_limit = 3
-
- def __init__(self):
- self._failures = {}
- self._pages = {}
- self._archives = {}
-
- def too_many_failures(self, url):
- return self._failures.get(url, 0) >= self.failure_limit
-
- def get_page(self, url):
- return self._pages.get(url)
-
- def is_archive(self, url):
- return self._archives.get(url, False)
-
- def set_is_archive(self, url, value=True):
- self._archives[url] = value
-
- def add_page_failure(self, url, level):
- self._failures[url] = self._failures.get(url, 0)+level
-
- def add_page(self, urls, page):
- for url in urls:
- self._pages[url] = page
-
-
-class HTMLPage(object):
- """Represents one page, along with its URL"""
-
- ## FIXME: these regexes are horrible hacks:
- _homepage_re = re.compile(r'<th>\s*home\s*page', re.I)
- _download_re = re.compile(r'<th>\s*download\s+url', re.I)
-    ## These aren't so awful:
- _rel_re = re.compile("""<[^>]*\srel\s*=\s*['"]?([^'">]+)[^>]*>""", re.I)
- _href_re = re.compile('href=(?:"([^"]*)"|\'([^\']*)\'|([^>\\s\\n]*))', re.I|re.S)
- _base_re = re.compile(r"""<base\s+href\s*=\s*['"]?([^'">]+)""", re.I)
-
- def __init__(self, content, url, headers=None):
- self.content = content
- self.url = url
- self.headers = headers
-
- def __str__(self):
- return self.url
-
- @classmethod
- def get_page(cls, link, req, cache=None, skip_archives=True):
- url = link.url
- url = url.split('#', 1)[0]
- if cache.too_many_failures(url):
- return None
-
- # Check for VCS schemes that do not support lookup as web pages.
- from pip.vcs import VcsSupport
- for scheme in VcsSupport.schemes:
- if url.lower().startswith(scheme) and url[len(scheme)] in '+:':
- logger.debug('Cannot look at %(scheme)s URL %(link)s' % locals())
- return None
-
- if cache is not None:
- inst = cache.get_page(url)
- if inst is not None:
- return inst
- try:
- if skip_archives:
- if cache is not None:
- if cache.is_archive(url):
- return None
- filename = link.filename
- for bad_ext in ['.tar', '.tar.gz', '.tar.bz2', '.tgz', '.zip']:
- if filename.endswith(bad_ext):
- content_type = cls._get_content_type(url)
- if content_type.lower().startswith('text/html'):
- break
- else:
- logger.debug('Skipping page %s because of Content-Type: %s' % (link, content_type))
- if cache is not None:
- cache.set_is_archive(url)
- return None
- logger.debug('Getting page %s' % url)
-
- # Tack index.html onto file:// URLs that point to directories
- (scheme, netloc, path, params, query, fragment) = urlparse.urlparse(url)
- if scheme == 'file' and os.path.isdir(urllib.url2pathname(path)):
- # add trailing slash if not present so urljoin doesn't trim final segment
- if not url.endswith('/'):
- url += '/'
- url = urlparse.urljoin(url, 'index.html')
- logger.debug(' file: URL is directory, getting %s' % url)
-
- resp = urlopen(url)
-
- real_url = geturl(resp)
- headers = resp.info()
- inst = cls(resp.read(), real_url, headers)
- except (urllib2.HTTPError, urllib2.URLError, socket.timeout, socket.error, OSError, WindowsError), e:
- desc = str(e)
- if isinstance(e, socket.timeout):
- log_meth = logger.info
-                level = 1
- desc = 'timed out'
- elif isinstance(e, urllib2.URLError):
- log_meth = logger.info
- if hasattr(e, 'reason') and isinstance(e.reason, socket.timeout):
- desc = 'timed out'
- level = 1
- else:
- level = 2
- elif isinstance(e, urllib2.HTTPError) and e.code == 404:
- ## FIXME: notify?
- log_meth = logger.info
- level = 2
- else:
- log_meth = logger.info
- level = 1
- log_meth('Could not fetch URL %s: %s' % (link, desc))
- log_meth('Will skip URL %s when looking for download links for %s' % (link.url, req))
- if cache is not None:
- cache.add_page_failure(url, level)
- return None
- if cache is not None:
- cache.add_page([url, real_url], inst)
- return inst
-
- @staticmethod
- def _get_content_type(url):
- """Get the Content-Type of the given url, using a HEAD request"""
- scheme, netloc, path, query, fragment = urlparse.urlsplit(url)
-        if scheme not in ('http', 'https', 'ftp', 'ftps'):
- ## FIXME: some warning or something?
- ## assertion error?
- return ''
- req = Urllib2HeadRequest(url, headers={'Host': netloc})
- resp = urlopen(req)
- try:
- if hasattr(resp, 'code') and resp.code != 200 and scheme not in ('ftp', 'ftps'):
- ## FIXME: doesn't handle redirects
- return ''
- return resp.info().get('content-type', '')
- finally:
- resp.close()
-
- @property
- def base_url(self):
- if not hasattr(self, "_base_url"):
- match = self._base_re.search(self.content)
- if match:
- self._base_url = match.group(1)
- else:
- self._base_url = self.url
- return self._base_url
-
- @property
- def links(self):
- """Yields all links in the page"""
- for match in self._href_re.finditer(self.content):
- url = match.group(1) or match.group(2) or match.group(3)
- url = self.clean_link(urlparse.urljoin(self.base_url, url))
- yield Link(url, self)
-
- def rel_links(self):
- for url in self.explicit_rel_links():
- yield url
- for url in self.scraped_rel_links():
- yield url
-
- def explicit_rel_links(self, rels=('homepage', 'download')):
- """Yields all links with the given relations"""
- for match in self._rel_re.finditer(self.content):
- found_rels = match.group(1).lower().split()
- for rel in rels:
- if rel in found_rels:
- break
- else:
- continue
- match = self._href_re.search(match.group(0))
- if not match:
- continue
- url = match.group(1) or match.group(2) or match.group(3)
- url = self.clean_link(urlparse.urljoin(self.base_url, url))
- yield Link(url, self)
-
- def scraped_rel_links(self):
- for regex in (self._homepage_re, self._download_re):
- match = regex.search(self.content)
- if not match:
- continue
- href_match = self._href_re.search(self.content, pos=match.end())
- if not href_match:
- continue
-            url = href_match.group(1) or href_match.group(2) or href_match.group(3)
- if not url:
- continue
- url = self.clean_link(urlparse.urljoin(self.base_url, url))
- yield Link(url, self)
-
- _clean_re = re.compile(r'[^a-z0-9$&+,/:;=?@.#%_\\|-]', re.I)
-
- def clean_link(self, url):
- """Makes sure a link is fully encoded. That is, if a ' ' shows up in
- the link, it will be rewritten to %20 (while not over-quoting
- % or other characters)."""
- return self._clean_re.sub(
-            lambda match: '%%%02x' % ord(match.group(0)), url)
-
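-# Illustrative example (not part of the original source):
-#
-#     >>> page = HTMLPage('', 'http://example.com/')
-#     >>> page.clean_link('http://example.com/a b')
-#     'http://example.com/a%20b'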
-
-class Link(object):
-
- def __init__(self, url, comes_from=None):
- self.url = url
- self.comes_from = comes_from
-
- def __str__(self):
- if self.comes_from:
- return '%s (from %s)' % (self.url, self.comes_from)
- else:
- return self.url
-
- def __repr__(self):
- return '<Link %s>' % self
-
- def __eq__(self, other):
- return self.url == other.url
-
- def __hash__(self):
- return hash(self.url)
-
- @property
- def filename(self):
- url = self.url
- url = url.split('#', 1)[0]
- url = url.split('?', 1)[0]
- url = url.rstrip('/')
- name = posixpath.basename(url)
- assert name, (
- 'URL %r produced no filename' % url)
- return name
-
- @property
- def scheme(self):
- return urlparse.urlsplit(self.url)[0]
-
- @property
- def path(self):
- return urlparse.urlsplit(self.url)[2]
-
- def splitext(self):
- return splitext(posixpath.basename(self.path.rstrip('/')))
-
- _egg_fragment_re = re.compile(r'#egg=([^&]*)')
-
- @property
- def egg_fragment(self):
- match = self._egg_fragment_re.search(self.url)
- if not match:
- return None
- return match.group(1)
-
- _md5_re = re.compile(r'md5=([a-f0-9]+)')
-
- @property
- def md5_hash(self):
- match = self._md5_re.search(self.url)
- if match:
- return match.group(1)
- return None
-
- @property
- def show_url(self):
- return posixpath.basename(self.url.split('#', 1)[0].split('?', 1)[0])
-
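-# Illustrative example (not part of the original source):
-#
-#     >>> link = Link('http://example.com/pip-0.8.1.tar.gz#egg=pip')
-#     >>> link.filename, link.egg_fragment
-#     ('pip-0.8.1.tar.gz', 'pip')
-#     >>> Link('http://example.com/p.tar.gz#md5=0123456789abcdef').md5_hash
-#     '0123456789abcdef'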
-
-def get_requirement_from_url(url):
- """Get a requirement from the URL, if possible. This looks for #egg
- in the URL"""
- link = Link(url)
- egg_info = link.egg_fragment
- if not egg_info:
- egg_info = splitext(link.filename)[0]
- return package_to_requirement(egg_info)
-
-
-def package_to_requirement(package_name):
-    """Translate a name like Foo-1.2 to Foo==1.2"""
-    match = re.search(r'^(.*?)-(dev|\d.*)', package_name)
- if match:
- name = match.group(1)
- version = match.group(2)
- else:
- name = package_name
- version = ''
- if version:
- return '%s==%s' % (name, version)
- else:
- return name
-
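-# Illustrative examples (not part of the original source):
-#
-#     >>> package_to_requirement('Foo-1.2')
-#     'Foo==1.2'
-#     >>> package_to_requirement('Foo')
-#     'Foo'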
-
-def get_mirrors(hostname=None):
- """Return the list of mirrors from the last record found on the DNS
- entry::
-
- >>> from pip.index import get_mirrors
- >>> get_mirrors()
- ['a.pypi.python.org', 'b.pypi.python.org', 'c.pypi.python.org',
- 'd.pypi.python.org']
-
- Originally written for the distutils2 project by Alexis Metaireau.
- """
- if hostname is None:
- hostname = DEFAULT_MIRROR_URL
-
- # return the last mirror registered on PyPI.
- try:
- hostname = socket.gethostbyname_ex(hostname)[0]
- except socket.gaierror:
- return []
- end_letter = hostname.split(".", 1)
-
- # determine the list from the last one.
- return ["%s.%s" % (s, end_letter[1]) for s in string_range(end_letter[0])]
-
-
-def string_range(last):
- """Compute the range of string between "a" and last.
-
- This works for simple "a to z" lists, but also for "a to zz" lists.
- """
- for k in range(len(last)):
- for x in product(string.ascii_lowercase, repeat=k+1):
- result = ''.join(x)
- yield result
- if result == last:
- return
-
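-# Illustrative example (not part of the original source):
-#
-#     >>> list(string_range('c'))
-#     ['a', 'b', 'c']
-#     >>> list(string_range('ab'))[-3:]
-#     ['z', 'aa', 'ab']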
diff --git a/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/locations.py b/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/locations.py
deleted file mode 100755
index 4254ef2f..00000000
--- a/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/locations.py
+++ /dev/null
@@ -1,45 +0,0 @@
-"""Locations where we look for configs, install stuff, etc"""
-
-import sys
-import os
-from distutils import sysconfig
-
-
-def running_under_virtualenv():
- """
- Return True if we're running inside a virtualenv, False otherwise.
-
- """
- return hasattr(sys, 'real_prefix')
-
-
-if running_under_virtualenv():
- ## FIXME: is build/ a good name?
- build_prefix = os.path.join(sys.prefix, 'build')
- src_prefix = os.path.join(sys.prefix, 'src')
-else:
- ## FIXME: this isn't a very good default
- build_prefix = os.path.join(os.getcwd(), 'build')
- src_prefix = os.path.join(os.getcwd(), 'src')
-
-# FIXME doesn't account for venv linked to global site-packages
-
-site_packages = sysconfig.get_python_lib()
-user_dir = os.path.expanduser('~')
-if sys.platform == 'win32':
- bin_py = os.path.join(sys.prefix, 'Scripts')
- # buildout uses 'bin' on Windows too?
- if not os.path.exists(bin_py):
- bin_py = os.path.join(sys.prefix, 'bin')
- user_dir = os.environ.get('APPDATA', user_dir) # Use %APPDATA% for roaming
- default_storage_dir = os.path.join(user_dir, 'pip')
- default_config_file = os.path.join(default_storage_dir, 'pip.ini')
- default_log_file = os.path.join(default_storage_dir, 'pip.log')
-else:
- bin_py = os.path.join(sys.prefix, 'bin')
- default_storage_dir = os.path.join(user_dir, '.pip')
- default_config_file = os.path.join(default_storage_dir, 'pip.conf')
- default_log_file = os.path.join(default_storage_dir, 'pip.log')
- # Forcing to use /usr/local/bin for standard Mac OS X framework installs
- if sys.platform[:6] == 'darwin' and sys.prefix[:16] == '/System/Library/':
- bin_py = '/usr/local/bin'
diff --git a/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/log.py b/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/log.py
deleted file mode 100755
index 0218ab1a..00000000
--- a/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/log.py
+++ /dev/null
@@ -1,181 +0,0 @@
-"""Logging
-"""
-
-import sys
-import logging
-
-
-class Logger(object):
-
- """
- Logging object for use in command-line script. Allows ranges of
- levels, to avoid some redundancy of displayed information.
- """
-
- VERBOSE_DEBUG = logging.DEBUG-1
- DEBUG = logging.DEBUG
- INFO = logging.INFO
- NOTIFY = (logging.INFO+logging.WARN)/2
- WARN = WARNING = logging.WARN
- ERROR = logging.ERROR
- FATAL = logging.FATAL
-
- LEVELS = [VERBOSE_DEBUG, DEBUG, INFO, NOTIFY, WARN, ERROR, FATAL]
-
- def __init__(self):
- self.consumers = []
- self.indent = 0
- self.explicit_levels = False
- self.in_progress = None
- self.in_progress_hanging = False
-
- def debug(self, msg, *args, **kw):
- self.log(self.DEBUG, msg, *args, **kw)
-
- def info(self, msg, *args, **kw):
- self.log(self.INFO, msg, *args, **kw)
-
- def notify(self, msg, *args, **kw):
- self.log(self.NOTIFY, msg, *args, **kw)
-
- def warn(self, msg, *args, **kw):
- self.log(self.WARN, msg, *args, **kw)
-
- def error(self, msg, *args, **kw):
-        self.log(self.ERROR, msg, *args, **kw)
-
- def fatal(self, msg, *args, **kw):
- self.log(self.FATAL, msg, *args, **kw)
-
- def log(self, level, msg, *args, **kw):
- if args:
- if kw:
- raise TypeError(
- "You may give positional or keyword arguments, not both")
- args = args or kw
- rendered = None
- for consumer_level, consumer in self.consumers:
- if self.level_matches(level, consumer_level):
- if (self.in_progress_hanging
- and consumer in (sys.stdout, sys.stderr)):
- self.in_progress_hanging = False
- sys.stdout.write('\n')
- sys.stdout.flush()
- if rendered is None:
- if args:
- rendered = msg % args
- else:
- rendered = msg
- rendered = ' '*self.indent + rendered
- if self.explicit_levels:
- ## FIXME: should this be a name, not a level number?
- rendered = '%02i %s' % (level, rendered)
- if hasattr(consumer, 'write'):
- consumer.write(rendered+'\n')
- else:
- consumer(rendered)
-
- def start_progress(self, msg):
- assert not self.in_progress, (
- "Tried to start_progress(%r) while in_progress %r"
- % (msg, self.in_progress))
- if self.level_matches(self.NOTIFY, self._stdout_level()):
- sys.stdout.write(' '*self.indent + msg)
- sys.stdout.flush()
- self.in_progress_hanging = True
- else:
- self.in_progress_hanging = False
- self.in_progress = msg
- self.last_message = None
-
- def end_progress(self, msg='done.'):
- assert self.in_progress, (
- "Tried to end_progress without start_progress")
- if self.stdout_level_matches(self.NOTIFY):
- if not self.in_progress_hanging:
- # Some message has been printed out since start_progress
- sys.stdout.write('...' + self.in_progress + msg + '\n')
- sys.stdout.flush()
- else:
- # These erase any messages shown with show_progress (besides .'s)
- logger.show_progress('')
- logger.show_progress('')
- sys.stdout.write(msg + '\n')
- sys.stdout.flush()
- self.in_progress = None
- self.in_progress_hanging = False
-
- def show_progress(self, message=None):
- """If we are in a progress scope, and no log messages have been
- shown, write out another '.'"""
- if self.in_progress_hanging:
- if message is None:
- sys.stdout.write('.')
- sys.stdout.flush()
- else:
- if self.last_message:
- padding = ' ' * max(0, len(self.last_message)-len(message))
- else:
- padding = ''
- sys.stdout.write('\r%s%s%s%s' % (' '*self.indent, self.in_progress, message, padding))
- sys.stdout.flush()
- self.last_message = message
-
- def stdout_level_matches(self, level):
- """Returns true if a message at this level will go to stdout"""
- return self.level_matches(level, self._stdout_level())
-
- def _stdout_level(self):
- """Returns the level that stdout runs at"""
- for level, consumer in self.consumers:
- if consumer is sys.stdout:
- return level
- return self.FATAL
-
- def level_matches(self, level, consumer_level):
- """
- >>> l = Logger()
- >>> l.level_matches(3, 4)
- False
- >>> l.level_matches(3, 2)
- True
- >>> l.level_matches(slice(None, 3), 3)
- False
- >>> l.level_matches(slice(None, 3), 2)
- True
- >>> l.level_matches(slice(1, 3), 1)
- True
- >>> l.level_matches(slice(2, 3), 1)
- False
- """
- if isinstance(level, slice):
- start, stop = level.start, level.stop
- if start is not None and start > consumer_level:
- return False
-            if stop is not None and stop <= consumer_level:
- return False
- return True
- else:
- return level >= consumer_level
-
- @classmethod
- def level_for_integer(cls, level):
- levels = cls.LEVELS
- if level < 0:
- return levels[0]
- if level >= len(levels):
- return levels[-1]
- return levels[level]
-
- def move_stdout_to_stderr(self):
- to_remove = []
- to_add = []
- for consumer_level, consumer in self.consumers:
- if consumer == sys.stdout:
- to_remove.append((consumer_level, consumer))
- to_add.append((consumer_level, sys.stderr))
- for item in to_remove:
- self.consumers.remove(item)
- self.consumers.extend(to_add)
-
-logger = Logger()
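-# Illustrative usage (not part of the original source): callers attach
-# consumers as (level, stream-or-callable) pairs, and each message is written
-# to every consumer whose level it matches:
-#
-#     logger.consumers.append((Logger.NOTIFY, sys.stdout))
-#     logger.notify('shown')       # NOTIFY >= NOTIFY, written to stdout
-#     logger.debug('suppressed')   # DEBUG < NOTIFY, filtered out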
diff --git a/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/req.py b/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/req.py
deleted file mode 100755
index 444e7252..00000000
--- a/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/req.py
+++ /dev/null
@@ -1,1432 +0,0 @@
-import sys
-import os
-import shutil
-import re
-import zipfile
-import pkg_resources
-import tempfile
-import urlparse
-import urllib2
-import urllib
-import ConfigParser
-from distutils.sysconfig import get_python_version
-from email.FeedParser import FeedParser
-from pip.locations import bin_py, running_under_virtualenv
-from pip.exceptions import InstallationError, UninstallationError
-from pip.vcs import vcs
-from pip.log import logger
-from pip.util import display_path, rmtree
-from pip.util import ask, backup_dir
-from pip.util import is_installable_dir, is_local, dist_is_local
-from pip.util import renames, normalize_path, egg_link_path
-from pip.util import make_path_relative
-from pip import call_subprocess
-from pip.backwardcompat import any, copytree
-from pip.index import Link
-from pip.locations import build_prefix
-from pip.download import (get_file_content, is_url, url_to_path,
- path_to_url, is_archive_file,
- unpack_vcs_link, is_vcs_url, is_file_url,
- unpack_file_url, unpack_http_url)
-
-
-PIP_DELETE_MARKER_FILENAME = 'pip-delete-this-directory.txt'
-
-
-class InstallRequirement(object):
-
- def __init__(self, req, comes_from, source_dir=None, editable=False,
- url=None, update=True):
- if isinstance(req, basestring):
- req = pkg_resources.Requirement.parse(req)
- self.req = req
- self.comes_from = comes_from
- self.source_dir = source_dir
- self.editable = editable
- self.url = url
- self._egg_info_path = None
- # This holds the pkg_resources.Distribution object if this requirement
- # is already available:
- self.satisfied_by = None
-        # This holds the pkg_resources.Distribution object if this requirement
- # conflicts with another installed distribution:
- self.conflicts_with = None
- self._temp_build_dir = None
- self._is_bundle = None
- # True if the editable should be updated:
- self.update = update
- # Set to True after successful installation
- self.install_succeeded = None
- # UninstallPathSet of uninstalled distribution (for possible rollback)
- self.uninstalled = None
-
- @classmethod
- def from_editable(cls, editable_req, comes_from=None, default_vcs=None):
- name, url = parse_editable(editable_req, default_vcs)
- if url.startswith('file:'):
- source_dir = url_to_path(url)
- else:
- source_dir = None
- return cls(name, comes_from, source_dir=source_dir, editable=True, url=url)
-
- @classmethod
- def from_line(cls, name, comes_from=None):
- """Creates an InstallRequirement from a name, which might be a
- requirement, directory containing 'setup.py', filename, or URL.
- """
- url = None
- name = name.strip()
- req = name
- path = os.path.normpath(os.path.abspath(name))
-
- if is_url(name):
- url = name
- ## FIXME: I think getting the requirement here is a bad idea:
- #req = get_requirement_from_url(url)
- req = None
- elif os.path.isdir(path) and (os.path.sep in name or name.startswith('.')):
- if not is_installable_dir(path):
- raise InstallationError("Directory %r is not installable. File 'setup.py' not found."
- % name)
- url = path_to_url(name)
- #req = get_requirement_from_url(url)
- req = None
- elif is_archive_file(path):
- if not os.path.isfile(path):
- logger.warn('Requirement %r looks like a filename, but the file does not exist'
- % name)
- url = path_to_url(name)
- #req = get_requirement_from_url(url)
- req = None
- return cls(req, comes_from, url=url)
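-    # Editorial note, not in the original module: the branches above mean a
-    # requirement line can take several shapes, e.g. (illustrative names):
-    #   from_line('Markdown==2.0')     -> req parsed, url=None
-    #   from_line('./mypkg')           -> req=None, url='file://...' (dir with setup.py)
-    #   from_line('mypkg-1.0.tar.gz')  -> req=None, url='file://...'
-    #   from_line('http://host/p.tgz') -> req=None, url as given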
-
- def __str__(self):
- if self.req:
- s = str(self.req)
- if self.url:
- s += ' from %s' % self.url
- else:
- s = self.url
- if self.satisfied_by is not None:
- s += ' in %s' % display_path(self.satisfied_by.location)
- if self.comes_from:
- if isinstance(self.comes_from, basestring):
- comes_from = self.comes_from
- else:
- comes_from = self.comes_from.from_path()
- if comes_from:
- s += ' (from %s)' % comes_from
- return s
-
- def from_path(self):
- if self.req is None:
- return None
- s = str(self.req)
- if self.comes_from:
- if isinstance(self.comes_from, basestring):
- comes_from = self.comes_from
- else:
- comes_from = self.comes_from.from_path()
- if comes_from:
- s += '->' + comes_from
- return s
-
- def build_location(self, build_dir, unpack=True):
- if self._temp_build_dir is not None:
- return self._temp_build_dir
- if self.req is None:
- self._temp_build_dir = tempfile.mkdtemp('-build', 'pip-')
- self._ideal_build_dir = build_dir
- return self._temp_build_dir
- if self.editable:
- name = self.name.lower()
- else:
- name = self.name
- # FIXME: Is there a better place to create the build_dir? (hg and bzr need this)
- if not os.path.exists(build_dir):
- _make_build_dir(build_dir)
- return os.path.join(build_dir, name)
-
- def correct_build_location(self):
-        """If the build location was a temporary directory, this will move it
-        to a new, more permanent location"""
- if self.source_dir is not None:
- return
- assert self.req is not None
- assert self._temp_build_dir
- old_location = self._temp_build_dir
- new_build_dir = self._ideal_build_dir
- del self._ideal_build_dir
- if self.editable:
- name = self.name.lower()
- else:
- name = self.name
- new_location = os.path.join(new_build_dir, name)
- if not os.path.exists(new_build_dir):
- logger.debug('Creating directory %s' % new_build_dir)
- _make_build_dir(new_build_dir)
- if os.path.exists(new_location):
- raise InstallationError(
- 'A package already exists in %s; please remove it to continue'
- % display_path(new_location))
- logger.debug('Moving package %s from %s to new location %s'
- % (self, display_path(old_location), display_path(new_location)))
- shutil.move(old_location, new_location)
- self._temp_build_dir = new_location
- self.source_dir = new_location
- self._egg_info_path = None
-
- @property
- def name(self):
- if self.req is None:
- return None
- return self.req.project_name
-
- @property
- def url_name(self):
- if self.req is None:
- return None
- return urllib.quote(self.req.unsafe_name)
-
- @property
- def setup_py(self):
- return os.path.join(self.source_dir, 'setup.py')
-
- def run_egg_info(self, force_root_egg_info=False):
- assert self.source_dir
- if self.name:
- logger.notify('Running setup.py egg_info for package %s' % self.name)
- else:
- logger.notify('Running setup.py egg_info for package from %s' % self.url)
- logger.indent += 2
- try:
- script = self._run_setup_py
- script = script.replace('__SETUP_PY__', repr(self.setup_py))
- script = script.replace('__PKG_NAME__', repr(self.name))
- # We can't put the .egg-info files at the root, because then the source code will be mistaken
- # for an installed egg, causing problems
- if self.editable or force_root_egg_info:
- egg_base_option = []
- else:
- egg_info_dir = os.path.join(self.source_dir, 'pip-egg-info')
- if not os.path.exists(egg_info_dir):
- os.makedirs(egg_info_dir)
- egg_base_option = ['--egg-base', 'pip-egg-info']
- call_subprocess(
- [sys.executable, '-c', script, 'egg_info'] + egg_base_option,
- cwd=self.source_dir, filter_stdout=self._filter_install, show_stdout=False,
- command_level=logger.VERBOSE_DEBUG,
- command_desc='python setup.py egg_info')
- finally:
- logger.indent -= 2
- if not self.req:
- self.req = pkg_resources.Requirement.parse(self.pkg_info()['Name'])
- self.correct_build_location()
-
- ## FIXME: This is a lame hack, entirely for PasteScript which has
- ## a self-provided entry point that causes this awkwardness
- _run_setup_py = """
-__file__ = __SETUP_PY__
-from setuptools.command import egg_info
-def replacement_run(self):
- self.mkpath(self.egg_info)
- installer = self.distribution.fetch_build_egg
- for ep in egg_info.iter_entry_points('egg_info.writers'):
- # require=False is the change we're making:
- writer = ep.load(require=False)
- if writer:
- writer(self, ep.name, egg_info.os.path.join(self.egg_info,ep.name))
- self.find_sources()
-egg_info.egg_info.run = replacement_run
-execfile(__file__)
-"""
-
- def egg_info_data(self, filename):
- if self.satisfied_by is not None:
- if not self.satisfied_by.has_metadata(filename):
- return None
- return self.satisfied_by.get_metadata(filename)
- assert self.source_dir
- filename = self.egg_info_path(filename)
- if not os.path.exists(filename):
- return None
- fp = open(filename, 'r')
- data = fp.read()
- fp.close()
- return data
-
- def egg_info_path(self, filename):
- if self._egg_info_path is None:
- if self.editable:
- base = self.source_dir
- else:
- base = os.path.join(self.source_dir, 'pip-egg-info')
- filenames = os.listdir(base)
- if self.editable:
- filenames = []
- for root, dirs, files in os.walk(base):
- for dir in vcs.dirnames:
- if dir in dirs:
- dirs.remove(dir)
- for dir in dirs:
- # Don't search in anything that looks like a virtualenv environment
- if (os.path.exists(os.path.join(root, dir, 'bin', 'python'))
- or os.path.exists(os.path.join(root, dir, 'Scripts', 'Python.exe'))):
- dirs.remove(dir)
- # Also don't search through tests
- if dir == 'test' or dir == 'tests':
- dirs.remove(dir)
- filenames.extend([os.path.join(root, dir)
- for dir in dirs])
- filenames = [f for f in filenames if f.endswith('.egg-info')]
-
- if not filenames:
-                raise InstallationError('No files/directories in %s (from %s)' % (base, filename))
-
- # if we have more than one match, we pick the toplevel one. This can
- # easily be the case if there is a dist folder which contains an
- # extracted tarball for testing purposes.
- if len(filenames) > 1:
- filenames.sort(key=lambda x: x.count(os.path.sep) +
- (os.path.altsep and
- x.count(os.path.altsep) or 0))
- self._egg_info_path = os.path.join(base, filenames[0])
- return os.path.join(self._egg_info_path, filename)
-
- def egg_info_lines(self, filename):
- data = self.egg_info_data(filename)
- if not data:
- return []
- result = []
- for line in data.splitlines():
- line = line.strip()
- if not line or line.startswith('#'):
- continue
- result.append(line)
- return result
-
- def pkg_info(self):
- p = FeedParser()
- data = self.egg_info_data('PKG-INFO')
- if not data:
- logger.warn('No PKG-INFO file found in %s' % display_path(self.egg_info_path('PKG-INFO')))
- p.feed(data or '')
- return p.close()
-
- @property
- def dependency_links(self):
- return self.egg_info_lines('dependency_links.txt')
-
- _requirements_section_re = re.compile(r'\[(.*?)\]')
-
- def requirements(self, extras=()):
- in_extra = None
- for line in self.egg_info_lines('requires.txt'):
- match = self._requirements_section_re.match(line)
- if match:
- in_extra = match.group(1)
- continue
- if in_extra and in_extra not in extras:
- # Skip requirement for an extra we aren't requiring
- continue
- yield line
-
- @property
- def absolute_versions(self):
- for qualifier, version in self.req.specs:
- if qualifier == '==':
- yield version
-
- @property
- def installed_version(self):
- return self.pkg_info()['version']
-
- def assert_source_matches_version(self):
- assert self.source_dir
- if self.comes_from is None:
- # We don't check the versions of things explicitly installed.
- # This makes, e.g., "pip Package==dev" possible
- return
- version = self.installed_version
- if version not in self.req:
- logger.fatal(
- 'Source in %s has the version %s, which does not match the requirement %s'
- % (display_path(self.source_dir), version, self))
- raise InstallationError(
- 'Source in %s has version %s that conflicts with %s'
- % (display_path(self.source_dir), version, self))
- else:
- logger.debug('Source in %s has version %s, which satisfies requirement %s'
- % (display_path(self.source_dir), version, self))
-
- def update_editable(self, obtain=True):
- if not self.url:
- logger.info("Cannot update repository at %s; repository location is unknown" % self.source_dir)
- return
- assert self.editable
- assert self.source_dir
- if self.url.startswith('file:'):
- # Static paths don't get updated
- return
- assert '+' in self.url, "bad url: %r" % self.url
- if not self.update:
- return
- vc_type, url = self.url.split('+', 1)
- backend = vcs.get_backend(vc_type)
- if backend:
- vcs_backend = backend(self.url)
- if obtain:
- vcs_backend.obtain(self.source_dir)
- else:
- vcs_backend.export(self.source_dir)
- else:
- assert 0, (
- 'Unexpected version control type (in %s): %s'
- % (self.url, vc_type))
-
- def uninstall(self, auto_confirm=False):
- """
- Uninstall the distribution currently satisfying this requirement.
-
- Prompts before removing or modifying files unless
- ``auto_confirm`` is True.
-
- Refuses to delete or modify files outside of ``sys.prefix`` -
- thus uninstallation within a virtual environment can only
- modify that virtual environment, even if the virtualenv is
- linked to global site-packages.
-
- """
- if not self.check_if_exists():
- raise UninstallationError("Cannot uninstall requirement %s, not installed" % (self.name,))
- dist = self.satisfied_by or self.conflicts_with
-
- paths_to_remove = UninstallPathSet(dist)
-
- pip_egg_info_path = os.path.join(dist.location,
- dist.egg_name()) + '.egg-info'
- easy_install_egg = dist.egg_name() + '.egg'
- develop_egg_link = egg_link_path(dist)
- if os.path.exists(pip_egg_info_path):
- # package installed by pip
- paths_to_remove.add(pip_egg_info_path)
- if dist.has_metadata('installed-files.txt'):
- for installed_file in dist.get_metadata('installed-files.txt').splitlines():
- path = os.path.normpath(os.path.join(pip_egg_info_path, installed_file))
- paths_to_remove.add(path)
- if dist.has_metadata('top_level.txt'):
- if dist.has_metadata('namespace_packages.txt'):
- namespaces = dist.get_metadata('namespace_packages.txt')
- else:
- namespaces = []
- for top_level_pkg in [p for p
- in dist.get_metadata('top_level.txt').splitlines()
- if p and p not in namespaces]:
- path = os.path.join(dist.location, top_level_pkg)
- paths_to_remove.add(path)
- paths_to_remove.add(path + '.py')
- paths_to_remove.add(path + '.pyc')
-
- elif dist.location.endswith(easy_install_egg):
- # package installed by easy_install
- paths_to_remove.add(dist.location)
- easy_install_pth = os.path.join(os.path.dirname(dist.location),
- 'easy-install.pth')
- paths_to_remove.add_pth(easy_install_pth, './' + easy_install_egg)
-
- elif os.path.isfile(develop_egg_link):
- # develop egg
- fh = open(develop_egg_link, 'r')
- link_pointer = os.path.normcase(fh.readline().strip())
- fh.close()
- assert (link_pointer == dist.location), 'Egg-link %s does not match installed location of %s (at %s)' % (link_pointer, self.name, dist.location)
- paths_to_remove.add(develop_egg_link)
- easy_install_pth = os.path.join(os.path.dirname(develop_egg_link),
- 'easy-install.pth')
- paths_to_remove.add_pth(easy_install_pth, dist.location)
-
-        # find scripts installed by the distutils scripts= option
- if dist.has_metadata('scripts') and dist.metadata_isdir('scripts'):
- for script in dist.metadata_listdir('scripts'):
- paths_to_remove.add(os.path.join(bin_py, script))
- if sys.platform == 'win32':
- paths_to_remove.add(os.path.join(bin_py, script) + '.bat')
-
- # find console_scripts
- if dist.has_metadata('entry_points.txt'):
- config = ConfigParser.SafeConfigParser()
- config.readfp(FakeFile(dist.get_metadata_lines('entry_points.txt')))
- if config.has_section('console_scripts'):
- for name, value in config.items('console_scripts'):
- paths_to_remove.add(os.path.join(bin_py, name))
- if sys.platform == 'win32':
- paths_to_remove.add(os.path.join(bin_py, name) + '.exe')
- paths_to_remove.add(os.path.join(bin_py, name) + '.exe.manifest')
- paths_to_remove.add(os.path.join(bin_py, name) + '-script.py')
-
- paths_to_remove.remove(auto_confirm)
- self.uninstalled = paths_to_remove
-
- def rollback_uninstall(self):
- if self.uninstalled:
- self.uninstalled.rollback()
- else:
- logger.error("Can't rollback %s, nothing uninstalled."
- % (self.project_name,))
-
- def commit_uninstall(self):
- if self.uninstalled:
- self.uninstalled.commit()
- else:
- logger.error("Can't commit %s, nothing uninstalled."
- % (self.project_name,))
-
- def archive(self, build_dir):
- assert self.source_dir
- create_archive = True
- archive_name = '%s-%s.zip' % (self.name, self.installed_version)
- archive_path = os.path.join(build_dir, archive_name)
- if os.path.exists(archive_path):
- response = ask('The file %s exists. (i)gnore, (w)ipe, (b)ackup '
- % display_path(archive_path), ('i', 'w', 'b'))
- if response == 'i':
- create_archive = False
- elif response == 'w':
- logger.warn('Deleting %s' % display_path(archive_path))
- os.remove(archive_path)
- elif response == 'b':
- dest_file = backup_dir(archive_path)
- logger.warn('Backing up %s to %s'
- % (display_path(archive_path), display_path(dest_file)))
- shutil.move(archive_path, dest_file)
- if create_archive:
- zip = zipfile.ZipFile(archive_path, 'w', zipfile.ZIP_DEFLATED)
- dir = os.path.normcase(os.path.abspath(self.source_dir))
- for dirpath, dirnames, filenames in os.walk(dir):
- if 'pip-egg-info' in dirnames:
- dirnames.remove('pip-egg-info')
- for dirname in dirnames:
- dirname = os.path.join(dirpath, dirname)
- name = self._clean_zip_name(dirname, dir)
- zipdir = zipfile.ZipInfo(self.name + '/' + name + '/')
- zipdir.external_attr = 0755 << 16L
- zip.writestr(zipdir, '')
- for filename in filenames:
- if filename == PIP_DELETE_MARKER_FILENAME:
- continue
- filename = os.path.join(dirpath, filename)
- name = self._clean_zip_name(filename, dir)
- zip.write(filename, self.name + '/' + name)
- zip.close()
- logger.indent -= 2
- logger.notify('Saved %s' % display_path(archive_path))
-
- def _clean_zip_name(self, name, prefix):
- assert name.startswith(prefix+os.path.sep), (
- "name %r doesn't start with prefix %r" % (name, prefix))
- name = name[len(prefix)+1:]
- name = name.replace(os.path.sep, '/')
- return name
-
- def install(self, install_options, global_options=()):
- if self.editable:
- self.install_editable(install_options, global_options)
- return
- temp_location = tempfile.mkdtemp('-record', 'pip-')
- record_filename = os.path.join(temp_location, 'install-record.txt')
- try:
-
- install_args = [
- sys.executable, '-c',
- "import setuptools;__file__=%r;"\
- "execfile(__file__)" % self.setup_py] +\
- list(global_options) + [
- 'install',
- '--single-version-externally-managed',
- '--record', record_filename]
-
- if running_under_virtualenv():
- ## FIXME: I'm not sure if this is a reasonable location; probably not
- ## but we can't put it in the default location, as that is a virtualenv symlink that isn't writable
- install_args += ['--install-headers',
- os.path.join(sys.prefix, 'include', 'site',
- 'python' + get_python_version())]
- logger.notify('Running setup.py install for %s' % self.name)
- logger.indent += 2
- try:
- call_subprocess(install_args + install_options,
- cwd=self.source_dir, filter_stdout=self._filter_install, show_stdout=False)
- finally:
- logger.indent -= 2
- if not os.path.exists(record_filename):
- logger.notify('Record file %s not found' % record_filename)
- return
- self.install_succeeded = True
- f = open(record_filename)
- for line in f:
- line = line.strip()
- if line.endswith('.egg-info'):
- egg_info_dir = line
- break
- else:
- logger.warn('Could not find .egg-info directory in install record for %s' % self)
- ## FIXME: put the record somewhere
- ## FIXME: should this be an error?
- return
- f.close()
- new_lines = []
- f = open(record_filename)
- for line in f:
- filename = line.strip()
- if os.path.isdir(filename):
- filename += os.path.sep
- new_lines.append(make_path_relative(filename, egg_info_dir))
- f.close()
- f = open(os.path.join(egg_info_dir, 'installed-files.txt'), 'w')
- f.write('\n'.join(new_lines)+'\n')
- f.close()
- finally:
- if os.path.exists(record_filename):
- os.remove(record_filename)
- os.rmdir(temp_location)
-
- def remove_temporary_source(self):
- """Remove the source files from this requirement, if they are marked
- for deletion"""
- if self.is_bundle or os.path.exists(self.delete_marker_filename):
- logger.info('Removing source in %s' % self.source_dir)
- if self.source_dir:
- rmtree(self.source_dir)
- self.source_dir = None
- if self._temp_build_dir and os.path.exists(self._temp_build_dir):
- rmtree(self._temp_build_dir)
- self._temp_build_dir = None
-
- def install_editable(self, install_options, global_options=()):
- logger.notify('Running setup.py develop for %s' % self.name)
- logger.indent += 2
- try:
- ## FIXME: should we do --install-headers here too?
- call_subprocess(
- [sys.executable, '-c',
- "import setuptools; __file__=%r; execfile(%r)" % (self.setup_py, self.setup_py)]
- + list(global_options) + ['develop', '--no-deps'] + list(install_options),
-
- cwd=self.source_dir, filter_stdout=self._filter_install,
- show_stdout=False)
- finally:
- logger.indent -= 2
- self.install_succeeded = True
-
- def _filter_install(self, line):
- level = logger.NOTIFY
- for regex in [r'^running .*', r'^writing .*', '^creating .*', '^[Cc]opying .*',
- r'^reading .*', r"^removing .*\.egg-info' \(and everything under it\)$",
- r'^byte-compiling ',
- # Not sure what this warning is, but it seems harmless:
- r"^warning: manifest_maker: standard file '-c' not found$"]:
- if re.search(regex, line.strip()):
- level = logger.INFO
- break
- return (level, line)
-
- def check_if_exists(self):
- """Find an installed distribution that satisfies or conflicts
- with this requirement, and set self.satisfied_by or
- self.conflicts_with appropriately."""
- if self.req is None:
- return False
- try:
- self.satisfied_by = pkg_resources.get_distribution(self.req)
- except pkg_resources.DistributionNotFound:
- return False
- except pkg_resources.VersionConflict:
- self.conflicts_with = pkg_resources.get_distribution(self.req.project_name)
- return True
-
- @property
- def is_bundle(self):
- if self._is_bundle is not None:
- return self._is_bundle
- base = self._temp_build_dir
- if not base:
- ## FIXME: this doesn't seem right:
- return False
- self._is_bundle = (os.path.exists(os.path.join(base, 'pip-manifest.txt'))
- or os.path.exists(os.path.join(base, 'pyinstall-manifest.txt')))
- return self._is_bundle
-
- def bundle_requirements(self):
- for dest_dir in self._bundle_editable_dirs:
- package = os.path.basename(dest_dir)
- ## FIXME: svnism:
- for vcs_backend in vcs.backends:
- url = rev = None
- vcs_bundle_file = os.path.join(
- dest_dir, vcs_backend.bundle_file)
- if os.path.exists(vcs_bundle_file):
- vc_type = vcs_backend.name
- fp = open(vcs_bundle_file)
- content = fp.read()
- fp.close()
- url, rev = vcs_backend().parse_vcs_bundle_file(content)
- break
- if url:
- url = '%s+%s@%s' % (vc_type, url, rev)
- else:
- url = None
- yield InstallRequirement(
- package, self, editable=True, url=url,
- update=False, source_dir=dest_dir)
- for dest_dir in self._bundle_build_dirs:
- package = os.path.basename(dest_dir)
- yield InstallRequirement(
- package, self,
- source_dir=dest_dir)
-
- def move_bundle_files(self, dest_build_dir, dest_src_dir):
- base = self._temp_build_dir
- assert base
- src_dir = os.path.join(base, 'src')
- build_dir = os.path.join(base, 'build')
- bundle_build_dirs = []
- bundle_editable_dirs = []
- for source_dir, dest_dir, dir_collection in [
- (src_dir, dest_src_dir, bundle_editable_dirs),
- (build_dir, dest_build_dir, bundle_build_dirs)]:
- if os.path.exists(source_dir):
- for dirname in os.listdir(source_dir):
- dest = os.path.join(dest_dir, dirname)
- dir_collection.append(dest)
- if os.path.exists(dest):
- logger.warn('The directory %s (containing package %s) already exists; cannot move source from bundle %s'
- % (dest, dirname, self))
- continue
- if not os.path.exists(dest_dir):
- logger.info('Creating directory %s' % dest_dir)
- os.makedirs(dest_dir)
- shutil.move(os.path.join(source_dir, dirname), dest)
- if not os.listdir(source_dir):
- os.rmdir(source_dir)
- self._temp_build_dir = None
- self._bundle_build_dirs = bundle_build_dirs
- self._bundle_editable_dirs = bundle_editable_dirs
-
- @property
- def delete_marker_filename(self):
- assert self.source_dir
- return os.path.join(self.source_dir, PIP_DELETE_MARKER_FILENAME)
-
-
-DELETE_MARKER_MESSAGE = '''\
-This file is placed here by pip to indicate the source was put
-here by pip.
-
-Once this package is successfully installed this source code will be
-deleted (unless you remove this file).
-'''
-
-
-class RequirementSet(object):
-
- def __init__(self, build_dir, src_dir, download_dir, download_cache=None,
- upgrade=False, ignore_installed=False,
- ignore_dependencies=False):
- self.build_dir = build_dir
- self.src_dir = src_dir
- self.download_dir = download_dir
- self.download_cache = download_cache
- self.upgrade = upgrade
- self.ignore_installed = ignore_installed
- self.requirements = {}
- # Mapping of alias: real_name
- self.requirement_aliases = {}
- self.unnamed_requirements = []
- self.ignore_dependencies = ignore_dependencies
- self.successfully_downloaded = []
- self.successfully_installed = []
- self.reqs_to_cleanup = []
-
- def __str__(self):
- reqs = [req for req in self.requirements.values()
- if not req.comes_from]
- reqs.sort(key=lambda req: req.name.lower())
- return ' '.join([str(req.req) for req in reqs])
-
- def add_requirement(self, install_req):
- name = install_req.name
- if not name:
- self.unnamed_requirements.append(install_req)
- else:
- if self.has_requirement(name):
- raise InstallationError(
-                    'Double requirement given: %s (already in %s, name=%r)'
- % (install_req, self.get_requirement(name), name))
- self.requirements[name] = install_req
- ## FIXME: what about other normalizations? E.g., _ vs. -?
- if name.lower() != name:
- self.requirement_aliases[name.lower()] = name
-
- def has_requirement(self, project_name):
- for name in project_name, project_name.lower():
- if name in self.requirements or name in self.requirement_aliases:
- return True
- return False
-
- @property
- def has_requirements(self):
- return self.requirements.values() or self.unnamed_requirements
-
- @property
- def has_editables(self):
- if any(req.editable for req in self.requirements.values()):
- return True
- if any(req.editable for req in self.unnamed_requirements):
- return True
- return False
-
- @property
- def is_download(self):
- if self.download_dir:
- self.download_dir = os.path.expanduser(self.download_dir)
- if os.path.exists(self.download_dir):
- return True
- else:
- logger.fatal('Could not find download directory')
- raise InstallationError(
- "Could not find or access download directory '%s'"
- % display_path(self.download_dir))
- return False
-
- def get_requirement(self, project_name):
- for name in project_name, project_name.lower():
- if name in self.requirements:
- return self.requirements[name]
- if name in self.requirement_aliases:
- return self.requirements[self.requirement_aliases[name]]
- raise KeyError("No project with the name %r" % project_name)
-
- def uninstall(self, auto_confirm=False):
- for req in self.requirements.values():
- req.uninstall(auto_confirm=auto_confirm)
- req.commit_uninstall()
-
- def locate_files(self):
- ## FIXME: duplicates code from install_files; relevant code should
- ## probably be factored out into a separate method
- unnamed = list(self.unnamed_requirements)
- reqs = self.requirements.values()
- while reqs or unnamed:
- if unnamed:
- req_to_install = unnamed.pop(0)
- else:
- req_to_install = reqs.pop(0)
- install_needed = True
- if not self.ignore_installed and not req_to_install.editable:
- req_to_install.check_if_exists()
- if req_to_install.satisfied_by:
- if self.upgrade:
- req_to_install.conflicts_with = req_to_install.satisfied_by
- req_to_install.satisfied_by = None
- else:
- install_needed = False
- if req_to_install.satisfied_by:
- logger.notify('Requirement already satisfied '
- '(use --upgrade to upgrade): %s'
- % req_to_install)
-
- if req_to_install.editable:
- if req_to_install.source_dir is None:
- req_to_install.source_dir = req_to_install.build_location(self.src_dir)
- elif install_needed:
- req_to_install.source_dir = req_to_install.build_location(self.build_dir, not self.is_download)
-
- if req_to_install.source_dir is not None and not os.path.isdir(req_to_install.source_dir):
- raise InstallationError('Could not install requirement %s '
- 'because source folder %s does not exist '
- '(perhaps --no-download was used without first running '
- 'an equivalent install with --no-install?)'
- % (req_to_install, req_to_install.source_dir))
-
- def prepare_files(self, finder, force_root_egg_info=False, bundle=False):
-        """Prepare the process: create temp directories, download and/or unpack files."""
- unnamed = list(self.unnamed_requirements)
- reqs = self.requirements.values()
- while reqs or unnamed:
- if unnamed:
- req_to_install = unnamed.pop(0)
- else:
- req_to_install = reqs.pop(0)
- install = True
- if not self.ignore_installed and not req_to_install.editable:
- req_to_install.check_if_exists()
- if req_to_install.satisfied_by:
- if self.upgrade:
- req_to_install.conflicts_with = req_to_install.satisfied_by
- req_to_install.satisfied_by = None
- else:
- install = False
- if req_to_install.satisfied_by:
- logger.notify('Requirement already satisfied '
- '(use --upgrade to upgrade): %s'
- % req_to_install)
- if req_to_install.editable:
- logger.notify('Obtaining %s' % req_to_install)
- elif install:
- if req_to_install.url and req_to_install.url.lower().startswith('file:'):
- logger.notify('Unpacking %s' % display_path(url_to_path(req_to_install.url)))
- else:
- logger.notify('Downloading/unpacking %s' % req_to_install)
- logger.indent += 2
- try:
- is_bundle = False
- if req_to_install.editable:
- if req_to_install.source_dir is None:
- location = req_to_install.build_location(self.src_dir)
- req_to_install.source_dir = location
- else:
- location = req_to_install.source_dir
- if not os.path.exists(self.build_dir):
- _make_build_dir(self.build_dir)
- req_to_install.update_editable(not self.is_download)
- if self.is_download:
- req_to_install.run_egg_info()
- req_to_install.archive(self.download_dir)
- else:
- req_to_install.run_egg_info()
- elif install:
- ##@@ if filesystem packages are not marked
-                    ##editable in a req, a non-deterministic error
- ##occurs when the script attempts to unpack the
- ##build directory
-
- location = req_to_install.build_location(self.build_dir, not self.is_download)
-                    ## FIXME: is the existence of the checkout good enough to use it? I don't think so.
- unpack = True
- if not os.path.exists(os.path.join(location, 'setup.py')):
- ## FIXME: this won't upgrade when there's an existing package unpacked in `location`
- if req_to_install.url is None:
- url = finder.find_requirement(req_to_install, upgrade=self.upgrade)
- else:
- ## FIXME: should req_to_install.url already be a link?
- url = Link(req_to_install.url)
- assert url
- if url:
- try:
- self.unpack_url(url, location, self.is_download)
- except urllib2.HTTPError, e:
- logger.fatal('Could not install requirement %s because of error %s'
- % (req_to_install, e))
- raise InstallationError(
- 'Could not install requirement %s because of HTTP error %s for URL %s'
- % (req_to_install, e, url))
- else:
- unpack = False
- if unpack:
- is_bundle = req_to_install.is_bundle
- url = None
- if is_bundle:
- req_to_install.move_bundle_files(self.build_dir, self.src_dir)
- for subreq in req_to_install.bundle_requirements():
- reqs.append(subreq)
- self.add_requirement(subreq)
- elif self.is_download:
- req_to_install.source_dir = location
- if url and url.scheme in vcs.all_schemes:
- req_to_install.run_egg_info()
- req_to_install.archive(self.download_dir)
- else:
- req_to_install.source_dir = location
- req_to_install.run_egg_info()
- if force_root_egg_info:
- # We need to run this to make sure that the .egg-info/
- # directory is created for packing in the bundle
- req_to_install.run_egg_info(force_root_egg_info=True)
- req_to_install.assert_source_matches_version()
- #@@ sketchy way of identifying packages not grabbed from an index
- if bundle and req_to_install.url:
- self.copy_to_build_dir(req_to_install)
- if not is_bundle and not self.is_download:
- ## FIXME: shouldn't be globally added:
- finder.add_dependency_links(req_to_install.dependency_links)
- ## FIXME: add extras in here:
- if not self.ignore_dependencies:
- for req in req_to_install.requirements():
- try:
- name = pkg_resources.Requirement.parse(req).project_name
- except ValueError, e:
- ## FIXME: proper warning
- logger.error('Invalid requirement: %r (%s) in requirement %s' % (req, e, req_to_install))
- continue
- if self.has_requirement(name):
- ## FIXME: check for conflict
- continue
- subreq = InstallRequirement(req, req_to_install)
- reqs.append(subreq)
- self.add_requirement(subreq)
- if req_to_install.name not in self.requirements:
- self.requirements[req_to_install.name] = req_to_install
- else:
- self.reqs_to_cleanup.append(req_to_install)
- if install:
- self.successfully_downloaded.append(req_to_install)
- if bundle and (req_to_install.url and req_to_install.url.startswith('file:///')):
- self.copy_to_build_dir(req_to_install)
- finally:
- logger.indent -= 2
-
- def cleanup_files(self, bundle=False):
- """Clean up files, remove builds."""
- logger.notify('Cleaning up...')
- logger.indent += 2
- for req in self.reqs_to_cleanup:
- req.remove_temporary_source()
-
- remove_dir = []
- if self._pip_has_created_build_dir():
- remove_dir.append(self.build_dir)
-
- # The source dir of a bundle can always be removed.
- if bundle:
- remove_dir.append(self.src_dir)
-
- for dir in remove_dir:
- if os.path.exists(dir):
- logger.info('Removing temporary dir %s...' % dir)
- rmtree(dir)
-
- logger.indent -= 2
-
- def _pip_has_created_build_dir(self):
- return (self.build_dir == build_prefix and
- os.path.exists(os.path.join(self.build_dir, PIP_DELETE_MARKER_FILENAME)))
-
- def copy_to_build_dir(self, req_to_install):
- target_dir = req_to_install.editable and self.src_dir or self.build_dir
-        logger.info("Copying %s to %s" % (req_to_install.name, target_dir))
- dest = os.path.join(target_dir, req_to_install.name)
- copytree(req_to_install.source_dir, dest)
-        call_subprocess(["python", "%s/setup.py" % dest, "clean"])
-
- def unpack_url(self, link, location, only_download=False):
- if only_download:
- location = self.download_dir
- if is_vcs_url(link):
- return unpack_vcs_link(link, location, only_download)
- elif is_file_url(link):
- return unpack_file_url(link, location)
- else:
- if self.download_cache:
- self.download_cache = os.path.expanduser(self.download_cache)
- return unpack_http_url(link, location, self.download_cache, only_download)
-
- def install(self, install_options, global_options=()):
- """Install everything in this set (after having downloaded and unpacked the packages)"""
- to_install = sorted([r for r in self.requirements.values()
- if self.upgrade or not r.satisfied_by],
- key=lambda p: p.name.lower())
- if to_install:
- logger.notify('Installing collected packages: %s' % (', '.join([req.name for req in to_install])))
- logger.indent += 2
- try:
- for requirement in to_install:
- if requirement.conflicts_with:
- logger.notify('Found existing installation: %s'
- % requirement.conflicts_with)
- logger.indent += 2
- try:
- requirement.uninstall(auto_confirm=True)
- finally:
- logger.indent -= 2
- try:
- requirement.install(install_options, global_options)
- except:
- # if install did not succeed, rollback previous uninstall
- if requirement.conflicts_with and not requirement.install_succeeded:
- requirement.rollback_uninstall()
- raise
- else:
- if requirement.conflicts_with and requirement.install_succeeded:
- requirement.commit_uninstall()
- requirement.remove_temporary_source()
- finally:
- logger.indent -= 2
- self.successfully_installed = to_install
-
- def create_bundle(self, bundle_filename):
- ## FIXME: can't decide which is better; zip is easier to read
- ## random files from, but tar.bz2 is smaller and not as lame a
- ## format.
-
- ## FIXME: this file should really include a manifest of the
- ## packages, maybe some other metadata files. It would make
- ## it easier to detect as well.
- zip = zipfile.ZipFile(bundle_filename, 'w', zipfile.ZIP_DEFLATED)
- vcs_dirs = []
- for dir, basename in (self.build_dir, 'build'), (self.src_dir, 'src'):
- dir = os.path.normcase(os.path.abspath(dir))
- for dirpath, dirnames, filenames in os.walk(dir):
- for backend in vcs.backends:
- vcs_backend = backend()
- vcs_url = vcs_rev = None
- if vcs_backend.dirname in dirnames:
- for vcs_dir in vcs_dirs:
- if dirpath.startswith(vcs_dir):
- # vcs bundle file already in parent directory
- break
- else:
- vcs_url, vcs_rev = vcs_backend.get_info(
- os.path.join(dir, dirpath))
- vcs_dirs.append(dirpath)
- vcs_bundle_file = vcs_backend.bundle_file
- vcs_guide = vcs_backend.guide % {'url': vcs_url,
- 'rev': vcs_rev}
- dirnames.remove(vcs_backend.dirname)
- break
- if 'pip-egg-info' in dirnames:
- dirnames.remove('pip-egg-info')
- for dirname in dirnames:
- dirname = os.path.join(dirpath, dirname)
- name = self._clean_zip_name(dirname, dir)
- zip.writestr(basename + '/' + name + '/', '')
- for filename in filenames:
- if filename == PIP_DELETE_MARKER_FILENAME:
- continue
- filename = os.path.join(dirpath, filename)
- name = self._clean_zip_name(filename, dir)
- zip.write(filename, basename + '/' + name)
- if vcs_url:
- name = os.path.join(dirpath, vcs_bundle_file)
- name = self._clean_zip_name(name, dir)
- zip.writestr(basename + '/' + name, vcs_guide)
-
- zip.writestr('pip-manifest.txt', self.bundle_requirements())
- zip.close()
-
- BUNDLE_HEADER = '''\
-# This is a pip bundle file that contains many source packages
-# that can be installed as a group. You can install this like:
-# pip this_file.zip
-# The rest of the file contains a list of all the packages included:
-'''
-
- def bundle_requirements(self):
- parts = [self.BUNDLE_HEADER]
- for req in sorted(
- [req for req in self.requirements.values()
- if not req.comes_from],
- key=lambda x: x.name):
- parts.append('%s==%s\n' % (req.name, req.installed_version))
- parts.append('# These packages were installed to satisfy the above requirements:\n')
- for req in sorted(
- [req for req in self.requirements.values()
- if req.comes_from],
- key=lambda x: x.name):
- parts.append('%s==%s\n' % (req.name, req.installed_version))
- ## FIXME: should we do something with self.unnamed_requirements?
- return ''.join(parts)
-
- def _clean_zip_name(self, name, prefix):
- assert name.startswith(prefix+os.path.sep), (
- "name %r doesn't start with prefix %r" % (name, prefix))
- name = name[len(prefix)+1:]
- name = name.replace(os.path.sep, '/')
- return name
-
-
-def _make_build_dir(build_dir):
- os.makedirs(build_dir)
- _write_delete_marker_message(os.path.join(build_dir, PIP_DELETE_MARKER_FILENAME))
-
-
-def _write_delete_marker_message(filepath):
- marker_fp = open(filepath, 'w')
- marker_fp.write(DELETE_MARKER_MESSAGE)
- marker_fp.close()
-
-
-_scheme_re = re.compile(r'^(http|https|file):', re.I)
-
-
-def parse_requirements(filename, finder=None, comes_from=None, options=None):
-    skip_match = None
-    skip_regex = options.skip_requirements_regex if options else None
-    if skip_regex:
-        skip_match = re.compile(skip_regex)
- filename, content = get_file_content(filename, comes_from=comes_from)
- for line_number, line in enumerate(content.splitlines()):
- line_number += 1
- line = line.strip()
- if not line or line.startswith('#'):
- continue
- if skip_match and skip_match.search(line):
- continue
- if line.startswith('-r') or line.startswith('--requirement'):
- if line.startswith('-r'):
- req_url = line[2:].strip()
- else:
- req_url = line[len('--requirement'):].strip().strip('=')
- if _scheme_re.search(filename):
- # Relative to a URL
-                req_url = urlparse.urljoin(filename, req_url)
- elif not _scheme_re.search(req_url):
- req_url = os.path.join(os.path.dirname(filename), req_url)
- for item in parse_requirements(req_url, finder, comes_from=filename, options=options):
- yield item
- elif line.startswith('-Z') or line.startswith('--always-unzip'):
- # No longer used, but previously these were used in
- # requirement files, so we'll ignore.
- pass
- elif line.startswith('-f') or line.startswith('--find-links'):
- if line.startswith('-f'):
- line = line[2:].strip()
- else:
- line = line[len('--find-links'):].strip().lstrip('=')
- ## FIXME: it would be nice to keep track of the source of
- ## the find_links:
- if finder:
- finder.find_links.append(line)
- elif line.startswith('-i') or line.startswith('--index-url'):
- if line.startswith('-i'):
- line = line[2:].strip()
- else:
- line = line[len('--index-url'):].strip().lstrip('=')
- if finder:
- finder.index_urls = [line]
- elif line.startswith('--extra-index-url'):
- line = line[len('--extra-index-url'):].strip().lstrip('=')
- if finder:
- finder.index_urls.append(line)
- else:
- comes_from = '-r %s (line %s)' % (filename, line_number)
- if line.startswith('-e') or line.startswith('--editable'):
- if line.startswith('-e'):
- line = line[2:].strip()
- else:
- line = line[len('--editable'):].strip()
- req = InstallRequirement.from_editable(
- line, comes_from=comes_from, default_vcs=options.default_vcs)
- else:
- req = InstallRequirement.from_line(line, comes_from)
- yield req
-
-
-def parse_editable(editable_req, default_vcs=None):
- """Parses svn+http://blahblah@rev#egg=Foobar into a requirement
- (Foobar) and a URL"""
- url = editable_req
- if os.path.isdir(url) and os.path.exists(os.path.join(url, 'setup.py')):
- # Treating it as code that has already been checked out
- url = path_to_url(url)
- if url.lower().startswith('file:'):
- return None, url
- for version_control in vcs:
- if url.lower().startswith('%s:' % version_control):
- url = '%s+%s' % (version_control, url)
- if '+' not in url:
- if default_vcs:
- url = default_vcs + '+' + url
- else:
- raise InstallationError(
- '--editable=%s should be formatted with svn+URL, git+URL, hg+URL or bzr+URL' % editable_req)
- vc_type = url.split('+', 1)[0].lower()
- if not vcs.get_backend(vc_type):
- raise InstallationError(
-            'For --editable=%s only svn (svn+URL), Git (git+URL), Mercurial (hg+URL) and Bazaar (bzr+URL) are currently supported' % editable_req)
- match = re.search(r'(?:#|#.*?&)egg=([^&]*)', editable_req)
- if (not match or not match.group(1)) and vcs.get_backend(vc_type):
- parts = [p for p in editable_req.split('#', 1)[0].split('/') if p]
- if parts[-2] in ('tags', 'branches', 'tag', 'branch'):
- req = parts[-3]
- elif parts[-1] == 'trunk':
- req = parts[-2]
- else:
- raise InstallationError(
- '--editable=%s is not the right format; it must have #egg=Package'
- % editable_req)
- else:
- req = match.group(1)
- ## FIXME: use package_to_requirement?
- match = re.search(r'^(.*?)(?:-dev|-\d.*)', req)
- if match:
- # Strip off -dev, -0.2, etc.
- req = match.group(1)
- return req, url
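-# Editorial example, not part of the original module: under the rules above,
-# parse_editable('git+http://example.com/repo.git#egg=Foo') returns
-# ('Foo', 'git+http://example.com/repo.git#egg=Foo'), while a local checkout
-# directory containing setup.py comes back as (None, 'file://...').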
-
-
-class UninstallPathSet(object):
- """A set of file paths to be removed in the uninstallation of a
- requirement."""
- def __init__(self, dist):
- self.paths = set()
- self._refuse = set()
- self.pth = {}
- self.dist = dist
- self.save_dir = None
- self._moved_paths = []
-
- def _permitted(self, path):
- """
- Return True if the given path is one we are permitted to
- remove/modify, False otherwise.
-
- """
- return is_local(path)
-
- def _can_uninstall(self):
- if not dist_is_local(self.dist):
- logger.notify("Not uninstalling %s at %s, outside environment %s"
- % (self.dist.project_name, normalize_path(self.dist.location), sys.prefix))
- return False
- return True
-
- def add(self, path):
- path = normalize_path(path)
- if not os.path.exists(path):
- return
- if self._permitted(path):
- self.paths.add(path)
- else:
- self._refuse.add(path)
-
- def add_pth(self, pth_file, entry):
- pth_file = normalize_path(pth_file)
- if self._permitted(pth_file):
- if pth_file not in self.pth:
- self.pth[pth_file] = UninstallPthEntries(pth_file)
- self.pth[pth_file].add(entry)
- else:
- self._refuse.add(pth_file)
-
- def compact(self, paths):
- """Compact a path set to contain the minimal number of paths
- necessary to contain all paths in the set. If /a/path/ and
- /a/path/to/a/file.txt are both in the set, leave only the
- shorter path."""
- short_paths = set()
- for path in sorted(paths, key=len):
- if not any([(path.startswith(shortpath) and
- path[len(shortpath.rstrip(os.path.sep))] == os.path.sep)
- for shortpath in short_paths]):
- short_paths.add(path)
- return short_paths
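-    # Editorial example, not in the original source: given both '/a/path/'
-    # and '/a/path/to/a/file.txt', compact() keeps only '/a/path/', since
-    # the longer entry lives underneath the shorter one.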
-
- def _stash(self, path):
- return os.path.join(
- self.save_dir, os.path.splitdrive(path)[1].lstrip(os.path.sep))
-
- def remove(self, auto_confirm=False):
- """Remove paths in ``self.paths`` with confirmation (unless
- ``auto_confirm`` is True)."""
- if not self._can_uninstall():
- return
- logger.notify('Uninstalling %s:' % self.dist.project_name)
- logger.indent += 2
- paths = sorted(self.compact(self.paths))
- try:
- if auto_confirm:
- response = 'y'
- else:
- for path in paths:
- logger.notify(path)
- response = ask('Proceed (y/n)? ', ('y', 'n'))
- if self._refuse:
- logger.notify('Not removing or modifying (outside of prefix):')
- for path in self.compact(self._refuse):
- logger.notify(path)
- if response == 'y':
- self.save_dir = tempfile.mkdtemp(suffix='-uninstall',
- prefix='pip-')
- for path in paths:
- new_path = self._stash(path)
- logger.info('Removing file or directory %s' % path)
- self._moved_paths.append(path)
- renames(path, new_path)
- for pth in self.pth.values():
- pth.remove()
- logger.notify('Successfully uninstalled %s' % self.dist.project_name)
-
- finally:
- logger.indent -= 2
-
- def rollback(self):
- """Rollback the changes previously made by remove()."""
- if self.save_dir is None:
- logger.error("Can't roll back %s; was not uninstalled" % self.dist.project_name)
- return False
- logger.notify('Rolling back uninstall of %s' % self.dist.project_name)
- for path in self._moved_paths:
- tmp_path = self._stash(path)
- logger.info('Replacing %s' % path)
- renames(tmp_path, path)
-        for pth in self.pth.values():
-            pth.rollback()
-
- def commit(self):
- """Remove temporary save dir: rollback will no longer be possible."""
- if self.save_dir is not None:
- shutil.rmtree(self.save_dir)
- self.save_dir = None
- self._moved_paths = []
-
-
-class UninstallPthEntries(object):
- def __init__(self, pth_file):
- if not os.path.isfile(pth_file):
- raise UninstallationError("Cannot remove entries from nonexistent file %s" % pth_file)
- self.file = pth_file
- self.entries = set()
- self._saved_lines = None
-
- def add(self, entry):
- entry = os.path.normcase(entry)
- # On Windows, os.path.normcase converts the entry to use
- # backslashes. This is correct for entries that describe absolute
- # paths outside of site-packages, but all the others use forward
- # slashes.
- if sys.platform == 'win32' and not os.path.splitdrive(entry)[0]:
- entry = entry.replace('\\', '/')
- self.entries.add(entry)
-
- def remove(self):
- logger.info('Removing pth entries from %s:' % self.file)
- fh = open(self.file, 'r')
- lines = fh.readlines()
- self._saved_lines = lines
- fh.close()
-        for entry in self.entries:
-            logger.info('Removing entry: %s' % entry)
-            try:
-                lines.remove(entry + '\n')
-            except ValueError:
-                pass
- fh = open(self.file, 'wb')
- fh.writelines(lines)
- fh.close()
-
- def rollback(self):
- if self._saved_lines is None:
- logger.error('Cannot roll back changes to %s, none were made' % self.file)
- return False
- logger.info('Rolling %s back to previous state' % self.file)
- fh = open(self.file, 'wb')
- fh.writelines(self._saved_lines)
- fh.close()
- return True
-
-
-class FakeFile(object):
- """Wrap a list of lines in an object with readline() to make
- ConfigParser happy."""
- def __init__(self, lines):
- self._gen = (l for l in lines)
-
- def readline(self):
- try:
- return self._gen.next()
- except StopIteration:
- return ''
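
An editorial sketch, not part of this diff, of how the deleted req.py pieces
fit together for a plain install: requirements are collected into a
RequirementSet, resolved through a finder, then installed. PackageFinder
lives in pip.index, outside this hunk, so its arguments here are
illustrative assumptions.

    from pip.index import PackageFinder
    from pip.req import InstallRequirement, RequirementSet

    requirement_set = RequirementSet(build_dir='build', src_dir='src',
                                     download_dir=None)
    requirement_set.add_requirement(
        InstallRequirement.from_line('Markdown==2.0'))

    # the finder maps a requirement to a concrete download URL
    finder = PackageFinder(find_links=[],
                           index_urls=['http://pypi.python.org/simple/'])

    requirement_set.prepare_files(finder)        # download, unpack, egg_info
    requirement_set.install(install_options=[])  # setup.py install per package
    requirement_set.cleanup_files()              # drop temporary build dirs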
diff --git a/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/runner.py b/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/runner.py
deleted file mode 100755
index be830ad9..00000000
--- a/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/runner.py
+++ /dev/null
@@ -1,18 +0,0 @@
-import sys
-import os
-
-
-def run():
- base = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
- ## FIXME: this is kind of crude; if we could create a fake pip
- ## module, then exec into it and update pip.__path__ properly, we
- ## wouldn't have to update sys.path:
- sys.path.insert(0, base)
- import pip
- return pip.main()
-
-
-if __name__ == '__main__':
- exit = run()
- if exit:
- sys.exit(exit)
diff --git a/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/util.py b/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/util.py
deleted file mode 100755
index 1eab34c0..00000000
--- a/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/util.py
+++ /dev/null
@@ -1,479 +0,0 @@
-import sys
-import shutil
-import os
-import stat
-import re
-import posixpath
-import pkg_resources
-import zipfile
-import tarfile
-from pip.exceptions import InstallationError
-from pip.backwardcompat import WindowsError
-from pip.locations import site_packages, running_under_virtualenv
-from pip.log import logger
-
-__all__ = ['rmtree', 'display_path', 'backup_dir',
- 'find_command', 'ask', 'Inf',
- 'normalize_name', 'splitext',
- 'format_size', 'is_installable_dir',
- 'is_svn_page', 'file_contents',
- 'split_leading_dir', 'has_leading_dir',
- 'make_path_relative', 'normalize_path',
- 'renames', 'get_terminal_size',
- 'unzip_file', 'untar_file', 'create_download_cache_folder',
- 'cache_download', 'unpack_file']
-
-
-def rmtree(dir):
- shutil.rmtree(dir, ignore_errors=True,
- onerror=rmtree_errorhandler)
-
-
-def rmtree_errorhandler(func, path, exc_info):
- """On Windows, the files in .svn are read-only, so when rmtree() tries to
- remove them, an exception is thrown. We catch that here, remove the
- read-only attribute, and hopefully continue without problems."""
- exctype, value = exc_info[:2]
-    # looking for a Windows error
- if exctype is not WindowsError or 'Access is denied' not in str(value):
- raise
- # file type should currently be read only
- if ((os.stat(path).st_mode & stat.S_IREAD) != stat.S_IREAD):
- raise
- # convert to read/write
- os.chmod(path, stat.S_IWRITE)
- # use the original function to repeat the operation
- func(path)
-
-
-def display_path(path):
- """Gives the display value for a given path, making it relative to cwd
- if possible."""
- path = os.path.normcase(os.path.abspath(path))
- if path.startswith(os.getcwd() + os.path.sep):
- path = '.' + path[len(os.getcwd()):]
- return path
-
-
-def backup_dir(dir, ext='.bak'):
- """Figure out the name of a directory to back up the given dir to
- (adding .bak, .bak2, etc)"""
- n = 1
- extension = ext
- while os.path.exists(dir + extension):
- n += 1
- extension = ext + str(n)
- return dir + extension
-
-
-def find_command(cmd, paths=None, pathext=None):
- """Searches the PATH for the given command and returns its path"""
- if paths is None:
-        paths = os.environ.get('PATH', '').split(os.pathsep)
- if isinstance(paths, basestring):
- paths = [paths]
- # check if there are funny path extensions for executables, e.g. Windows
- if pathext is None:
- pathext = os.environ.get('PATHEXT', '.COM;.EXE;.BAT;.CMD')
- pathext = [ext for ext in pathext.lower().split(os.pathsep)]
- # don't use extensions if the command ends with one of them
- if os.path.splitext(cmd)[1].lower() in pathext:
- pathext = ['']
- # check if we find the command on PATH
- for path in paths:
- # try without extension first
- cmd_path = os.path.join(path, cmd)
- for ext in pathext:
- # then including the extension
- cmd_path_ext = cmd_path + ext
- if os.path.exists(cmd_path_ext):
- return cmd_path_ext
- if os.path.exists(cmd_path):
- return cmd_path
- return None
-
-
-def ask(message, options):
- """Ask the message interactively, with the given possible responses"""
- while 1:
- if os.environ.get('PIP_NO_INPUT'):
- raise Exception('No input was expected ($PIP_NO_INPUT set); question: %s' % message)
- response = raw_input(message)
- response = response.strip().lower()
- if response not in options:
- print 'Your response (%r) was not one of the expected responses: %s' % (
- response, ', '.join(options))
- else:
- return response
-
-
-class _Inf(object):
- """I am bigger than everything!"""
- def __cmp__(self, a):
- if self is a:
- return 0
- return 1
-
- def __repr__(self):
- return 'Inf'
-
-Inf = _Inf()
-del _Inf
-
-
-_normalize_re = re.compile(r'[^a-z]', re.I)
-
-
-def normalize_name(name):
- return _normalize_re.sub('-', name.lower())
-
-
-def format_size(bytes):
- if bytes > 1000*1000:
- return '%.1fMb' % (bytes/1000.0/1000)
- elif bytes > 10*1000:
- return '%iKb' % (bytes/1000)
- elif bytes > 1000:
- return '%.1fKb' % (bytes/1000.0)
- else:
-        return '%i bytes' % bytes
-
-
-def is_installable_dir(path):
- """Return True if `path` is a directory containing a setup.py file."""
- if not os.path.isdir(path):
- return False
- setup_py = os.path.join(path, 'setup.py')
- if os.path.isfile(setup_py):
- return True
- return False
-
-
-def is_svn_page(html):
- """Returns true if the page appears to be the index page of an svn repository"""
- return (re.search(r'<title>[^<]*Revision \d+:', html)
- and re.search(r'Powered by (?:<a[^>]*?>)?Subversion', html, re.I))
-
-
-def file_contents(filename):
- fp = open(filename, 'rb')
- try:
- return fp.read()
- finally:
- fp.close()
-
-
-def split_leading_dir(path):
- path = str(path)
- path = path.lstrip('/').lstrip('\\')
- if '/' in path and (('\\' in path and path.find('/') < path.find('\\'))
- or '\\' not in path):
- return path.split('/', 1)
- elif '\\' in path:
- return path.split('\\', 1)
- else:
- return path, ''
-
-
-def has_leading_dir(paths):
- """Returns true if all the paths have the same leading path name
- (i.e., everything is in one subdirectory in an archive)"""
- common_prefix = None
- for path in paths:
- prefix, rest = split_leading_dir(path)
- if not prefix:
- return False
- elif common_prefix is None:
- common_prefix = prefix
- elif prefix != common_prefix:
- return False
- return True
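-# Editorial example, not in the original source: for a well-formed sdist,
-# has_leading_dir(['pkg-1.0/setup.py', 'pkg-1.0/README']) is True, so the
-# unpack helpers below strip the common 'pkg-1.0/' prefix; adding a path
-# like 'other/file' makes it False and the layout is kept as-is.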
-
-
-def make_path_relative(path, rel_to):
- """
-    Make a filename relative, so that it points at the same file but is
-    expressed relative to rel_to.
-
-    >>> make_path_relative('/usr/share/something/a-file.pth',
-    ...     '/usr/share/another-place/src/Directory')
-    '../../../something/a-file.pth'
-    >>> make_path_relative('/usr/share/something/a-file.pth',
-    ...     '/home/user/src/Directory')
-    '../../../usr/share/something/a-file.pth'
-    >>> make_path_relative('/usr/share/a-file.pth', '/usr/share/')
-    'a-file.pth'
- """
- path_filename = os.path.basename(path)
- path = os.path.dirname(path)
- path = os.path.normpath(os.path.abspath(path))
- rel_to = os.path.normpath(os.path.abspath(rel_to))
- path_parts = path.strip(os.path.sep).split(os.path.sep)
- rel_to_parts = rel_to.strip(os.path.sep).split(os.path.sep)
- while path_parts and rel_to_parts and path_parts[0] == rel_to_parts[0]:
- path_parts.pop(0)
- rel_to_parts.pop(0)
- full_parts = ['..']*len(rel_to_parts) + path_parts + [path_filename]
- if full_parts == ['']:
- return '.' + os.path.sep
- return os.path.sep.join(full_parts)
-
-
-def normalize_path(path):
- """
- Convert a path to its canonical, case-normalized, absolute version.
-
- """
- return os.path.normcase(os.path.realpath(path))
-
-
-def splitext(path):
-    """Like os.path.splitext, but takes off .tar too"""
- base, ext = posixpath.splitext(path)
- if base.lower().endswith('.tar'):
- ext = base[-4:] + ext
- base = base[:-4]
- return base, ext
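-# Editorial example, not in the original source:
-#   splitext('pip-0.8.1.tar.gz') == ('pip-0.8.1', '.tar.gz')
-# whereas os.path.splitext alone would give ('pip-0.8.1.tar', '.gz').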
-
-
-def renames(old, new):
- """Like os.renames(), but handles renaming across devices."""
- # Implementation borrowed from os.renames().
- head, tail = os.path.split(new)
- if head and tail and not os.path.exists(head):
- os.makedirs(head)
-
- shutil.move(old, new)
-
- head, tail = os.path.split(old)
- if head and tail:
- try:
- os.removedirs(head)
- except OSError:
- pass
-
-
-def is_local(path):
- """
- Return True if path is within sys.prefix, if we're running in a virtualenv.
-
- If we're not in a virtualenv, all paths are considered "local."
-
- """
- if not running_under_virtualenv():
- return True
- return normalize_path(path).startswith(normalize_path(sys.prefix))
-
-
-def dist_is_local(dist):
- """
- Return True if given Distribution object is installed locally
- (i.e. within current virtualenv).
-
- Always True if we're not in a virtualenv.
-
- """
- return is_local(dist_location(dist))
-
-
-def get_installed_distributions(local_only=True, skip=('setuptools', 'pip', 'python')):
- """
- Return a list of installed Distribution objects.
-
- If ``local_only`` is True (default), only return installations
- local to the current virtualenv, if in a virtualenv.
-
- ``skip`` argument is an iterable of lower-case project names to
- ignore; defaults to ('setuptools', 'pip', 'python'). [FIXME also
- skip virtualenv?]
-
- """
- if local_only:
- local_test = dist_is_local
- else:
- local_test = lambda d: True
- return [d for d in pkg_resources.working_set if local_test(d) and d.key not in skip]
-
-
-def egg_link_path(dist):
- """
- Return the path where we'd expect to find a .egg-link file for
- this distribution. (There doesn't seem to be any metadata in the
- Distribution object for a develop egg that points back to its
- .egg-link and easy-install.pth files).
-
- This won't find a globally-installed develop egg if we're in a
- virtualenv.
-
- """
- return os.path.join(site_packages, dist.project_name) + '.egg-link'
-
-
-def dist_location(dist):
- """
- Get the site-packages location of this distribution. Generally
- this is dist.location, except in the case of develop-installed
- packages, where dist.location is the source code location, and we
- want to know where the egg-link file is.
-
- """
- egg_link = egg_link_path(dist)
- if os.path.exists(egg_link):
- return egg_link
- return dist.location
-
-
-def get_terminal_size():
-    """Return a tuple (x, y) giving the width (x) and the height (y)
-    of the terminal window in characters."""
- def ioctl_GWINSZ(fd):
- try:
- import fcntl
- import termios
- import struct
- cr = struct.unpack('hh', fcntl.ioctl(fd, termios.TIOCGWINSZ,
- '1234'))
- except:
- return None
- if cr == (0, 0):
- return None
- return cr
- cr = ioctl_GWINSZ(0) or ioctl_GWINSZ(1) or ioctl_GWINSZ(2)
- if not cr:
- try:
- fd = os.open(os.ctermid(), os.O_RDONLY)
- cr = ioctl_GWINSZ(fd)
- os.close(fd)
- except:
- pass
- if not cr:
- cr = (os.environ.get('LINES', 25), os.environ.get('COLUMNS', 80))
- return int(cr[1]), int(cr[0])
-
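A minimal usage sketch (assuming the surrounding module is importable as pip.util; the (80, 25) fallback comes from the LINES/COLUMNS defaults above):

    from pip.util import get_terminal_size

    width, height = get_terminal_size()  # (columns, lines), e.g. (80, 25)
    print 'terminal is %d columns by %d lines' % (width, height)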
-
-def unzip_file(filename, location, flatten=True):
- """Unzip the file (zip file located at filename) to the destination
- location"""
- if not os.path.exists(location):
- os.makedirs(location)
- zipfp = open(filename, 'rb')
- try:
- zip = zipfile.ZipFile(zipfp)
- leading = has_leading_dir(zip.namelist()) and flatten
- for name in zip.namelist():
- data = zip.read(name)
- fn = name
- if leading:
- fn = split_leading_dir(name)[1]
- fn = os.path.join(location, fn)
- dir = os.path.dirname(fn)
- if not os.path.exists(dir):
- os.makedirs(dir)
- if fn.endswith('/') or fn.endswith('\\'):
- # A directory
- if not os.path.exists(fn):
- os.makedirs(fn)
- else:
- fp = open(fn, 'wb')
- try:
- fp.write(data)
- finally:
- fp.close()
- finally:
- zipfp.close()
-
-
-def untar_file(filename, location):
- """Untar the file (tar file located at filename) to the destination location"""
- if not os.path.exists(location):
- os.makedirs(location)
- if filename.lower().endswith('.gz') or filename.lower().endswith('.tgz'):
- mode = 'r:gz'
- elif filename.lower().endswith('.bz2') or filename.lower().endswith('.tbz'):
- mode = 'r:bz2'
- elif filename.lower().endswith('.tar'):
- mode = 'r'
- else:
- logger.warn('Cannot determine compression type for file %s' % filename)
- mode = 'r:*'
- tar = tarfile.open(filename, mode)
- try:
-        # note: python<=2.5 doesn't seem to know about pax headers, filter them
- leading = has_leading_dir([
- member.name for member in tar.getmembers()
- if member.name != 'pax_global_header'
- ])
- for member in tar.getmembers():
- fn = member.name
- if fn == 'pax_global_header':
- continue
- if leading:
- fn = split_leading_dir(fn)[1]
- path = os.path.join(location, fn)
- if member.isdir():
- if not os.path.exists(path):
- os.makedirs(path)
- else:
- try:
- fp = tar.extractfile(member)
- except (KeyError, AttributeError), e:
- # Some corrupt tar files seem to produce this
- # (specifically bad symlinks)
- logger.warn(
- 'In the tar file %s the member %s is invalid: %s'
- % (filename, member.name, e))
- continue
- if not os.path.exists(os.path.dirname(path)):
- os.makedirs(os.path.dirname(path))
- destfp = open(path, 'wb')
- try:
- shutil.copyfileobj(fp, destfp)
- finally:
- destfp.close()
- fp.close()
- finally:
- tar.close()
-
-
-def create_download_cache_folder(folder):
- logger.indent -= 2
- logger.notify('Creating supposed download cache at %s' % folder)
- logger.indent += 2
- os.makedirs(folder)
-
-
-def cache_download(target_file, temp_location, content_type):
- logger.notify('Storing download in cache at %s' % display_path(target_file))
- shutil.copyfile(temp_location, target_file)
- fp = open(target_file+'.content-type', 'w')
- fp.write(content_type)
- fp.close()
- os.unlink(temp_location)
-
-
-def unpack_file(filename, location, content_type, link):
- if (content_type == 'application/zip'
- or filename.endswith('.zip')
- or filename.endswith('.pybundle')
- or zipfile.is_zipfile(filename)):
- unzip_file(filename, location, flatten=not filename.endswith('.pybundle'))
- elif (content_type == 'application/x-gzip'
- or tarfile.is_tarfile(filename)
- or splitext(filename)[1].lower() in ('.tar', '.tar.gz', '.tar.bz2', '.tgz', '.tbz')):
- untar_file(filename, location)
- elif (content_type and content_type.startswith('text/html')
- and is_svn_page(file_contents(filename))):
- # We don't really care about this
- from pip.vcs.subversion import Subversion
- Subversion('svn+' + link.url).unpack(location)
- else:
- ## FIXME: handle?
- ## FIXME: magic signatures?
- logger.fatal('Cannot unpack file %s (downloaded from %s, content-type: %s); cannot detect archive format'
- % (filename, location, content_type))
- raise InstallationError('Cannot determine archive format of %s' % location)
-
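A hedged sketch of driving this dispatcher for a plain sdist tarball (the paths are made up): with no HTTP content-type and no VCS link, control falls through to the tar branch via splitext above.

    from pip.util import unpack_file

    unpack_file('/tmp/pip-0.8.1.tar.gz', '/tmp/pip-0.8.1-src',
                content_type=None, link=None)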
-
-
diff --git a/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/vcs/__init__.py b/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/vcs/__init__.py
deleted file mode 100755
index e110440c..00000000
--- a/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/vcs/__init__.py
+++ /dev/null
@@ -1,238 +0,0 @@
-"""Handles all VCS (version control) support"""
-
-import os
-import shutil
-import urlparse
-import urllib
-
-from pip.exceptions import BadCommand
-from pip.log import logger
-from pip.util import display_path, backup_dir, find_command, ask
-
-
-__all__ = ['vcs', 'get_src_requirement', 'import_vcs_support']
-
-
-class VcsSupport(object):
- _registry = {}
- schemes = ['ssh', 'git', 'hg', 'bzr', 'sftp']
-
- def __init__(self):
- # Register more schemes with urlparse for various version control systems
- urlparse.uses_netloc.extend(self.schemes)
- urlparse.uses_fragment.extend(self.schemes)
- super(VcsSupport, self).__init__()
-
- def __iter__(self):
- return self._registry.__iter__()
-
- @property
- def backends(self):
- return self._registry.values()
-
- @property
- def dirnames(self):
- return [backend.dirname for backend in self.backends]
-
- @property
- def all_schemes(self):
- schemes = []
- for backend in self.backends:
- schemes.extend(backend.schemes)
- return schemes
-
- def register(self, cls):
- if not hasattr(cls, 'name'):
- logger.warn('Cannot register VCS %s' % cls.__name__)
- return
- if cls.name not in self._registry:
- self._registry[cls.name] = cls
-
- def unregister(self, cls=None, name=None):
- if name in self._registry:
- del self._registry[name]
- elif cls in self._registry.values():
- del self._registry[cls.name]
- else:
- logger.warn('Cannot unregister because no class or name given')
-
- def get_backend_name(self, location):
- """
- Return the name of the version control backend if found at given
- location, e.g. vcs.get_backend_name('/path/to/vcs/checkout')
- """
- for vc_type in self._registry.values():
- path = os.path.join(location, vc_type.dirname)
- if os.path.exists(path):
- return vc_type.name
- return None
-
- def get_backend(self, name):
- name = name.lower()
- if name in self._registry:
- return self._registry[name]
-
- def get_backend_from_location(self, location):
- vc_type = self.get_backend_name(location)
- if vc_type:
- return self.get_backend(vc_type)
- return None
-
-
-vcs = VcsSupport()
-
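A short sketch of how this module-level registry is consulted once the concrete backends below have called vcs.register() (the checkout path is hypothetical):

    from pip.vcs import vcs

    backend = vcs.get_backend_from_location('/path/to/checkout')
    if backend is not None:
        print 'checkout is managed by %s' % backend.name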
-
-class VersionControl(object):
- name = ''
- dirname = ''
-
- def __init__(self, url=None, *args, **kwargs):
- self.url = url
- self._cmd = None
- super(VersionControl, self).__init__(*args, **kwargs)
-
- def _filter(self, line):
- return (logger.INFO, line)
-
- def _is_local_repository(self, repo):
- """
-        posix absolute paths start with os.path.sep,
-        win32 ones start with a drive letter (like c:\\folder)
- """
- drive, tail = os.path.splitdrive(repo)
- return repo.startswith(os.path.sep) or drive
-
- @property
- def cmd(self):
- if self._cmd is not None:
- return self._cmd
- command = find_command(self.name)
- if command is None:
- raise BadCommand('Cannot find command %r' % self.name)
- logger.info('Found command %r at %r' % (self.name, command))
- self._cmd = command
- return command
-
- def get_url_rev(self):
- """
- Returns the correct repository URL and revision by parsing the given
- repository URL
- """
- url = self.url.split('+', 1)[1]
- scheme, netloc, path, query, frag = urlparse.urlsplit(url)
- rev = None
- if '@' in path:
- path, rev = path.rsplit('@', 1)
- url = urlparse.urlunsplit((scheme, netloc, path, query, ''))
- return url, rev
-
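Tracing the parsing above on a pip-style URL (a worked example, not taken from the original source):

    vc = VersionControl('svn+http://example.org/repo/trunk@1234')
    print vc.get_url_rev()  # ('http://example.org/repo/trunk', '1234')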
- def get_info(self, location):
- """
- Returns (url, revision), where both are strings
- """
- assert not location.rstrip('/').endswith(self.dirname), 'Bad directory: %s' % location
- return self.get_url(location), self.get_revision(location)
-
- def normalize_url(self, url):
- """
- Normalize a URL for comparison by unquoting it and removing any trailing slash.
- """
- return urllib.unquote(url).rstrip('/')
-
- def compare_urls(self, url1, url2):
- """
- Compare two repo URLs for identity, ignoring incidental differences.
- """
- return (self.normalize_url(url1) == self.normalize_url(url2))
-
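For example, percent-encoding and trailing slashes are ignored when deciding whether an existing checkout already points at the requested repository (a small sketch):

    vc = VersionControl()
    # both sides normalize to 'https://example.org/some repo'
    print vc.compare_urls('https://example.org/some%20repo/',
                          'https://example.org/some repo')  # True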
- def parse_vcs_bundle_file(self, content):
- """
-        Parse the contents of the bundled text file that explains how to
-        restore the version control data that was stripped from the given
-        package, and return the URL and revision recorded there.
- """
- raise NotImplementedError
-
- def obtain(self, dest):
- """
- Called when installing or updating an editable package, takes the
- source path of the checkout.
- """
- raise NotImplementedError
-
- def switch(self, dest, url, rev_options):
- """
-        Switch the repo at ``dest`` to point to ``url``.
-        """
-        raise NotImplementedError
-
- def update(self, dest, rev_options):
- """
- Update an already-existing repo to the given ``rev_options``.
- """
- raise NotImplementedError
-
- def check_destination(self, dest, url, rev_options, rev_display):
- """
- Prepare a location to receive a checkout/clone.
-
- Return True if the location is ready for (and requires) a
- checkout/clone, False otherwise.
- """
- checkout = True
- prompt = False
- if os.path.exists(dest):
- checkout = False
- if os.path.exists(os.path.join(dest, self.dirname)):
- existing_url = self.get_url(dest)
- if self.compare_urls(existing_url, url):
- logger.info('%s in %s exists, and has correct URL (%s)'
- % (self.repo_name.title(), display_path(dest), url))
- logger.notify('Updating %s %s%s'
- % (display_path(dest), self.repo_name, rev_display))
- self.update(dest, rev_options)
- else:
- logger.warn('%s %s in %s exists with URL %s'
- % (self.name, self.repo_name, display_path(dest), existing_url))
- prompt = ('(s)witch, (i)gnore, (w)ipe, (b)ackup ', ('s', 'i', 'w', 'b'))
- else:
- logger.warn('Directory %s already exists, and is not a %s %s.'
- % (dest, self.name, self.repo_name))
- prompt = ('(i)gnore, (w)ipe, (b)ackup ', ('i', 'w', 'b'))
- if prompt:
- logger.warn('The plan is to install the %s repository %s'
- % (self.name, url))
- response = ask('What to do? %s' % prompt[0], prompt[1])
-
- if response == 's':
- logger.notify('Switching %s %s to %s%s'
- % (self.repo_name, display_path(dest), url, rev_display))
- self.switch(dest, url, rev_options)
- elif response == 'i':
- # do nothing
- pass
- elif response == 'w':
- logger.warn('Deleting %s' % display_path(dest))
- shutil.rmtree(dest)
- checkout = True
- elif response == 'b':
- dest_dir = backup_dir(dest)
- logger.warn('Backing up %s to %s'
- % (display_path(dest), dest_dir))
- shutil.move(dest, dest_dir)
- checkout = True
- return checkout
-
- def unpack(self, location):
- raise NotImplementedError
-
- def get_src_requirement(self, dist, location, find_tags=False):
- raise NotImplementedError
-
-
-def get_src_requirement(dist, location, find_tags):
- version_control = vcs.get_backend_from_location(location)
- if version_control:
- return version_control().get_src_requirement(dist, location, find_tags)
- logger.warn('cannot determine version of editable source in %s (is not SVN checkout, Git clone, Mercurial clone or Bazaar branch)' % location)
- return dist.as_requirement()
diff --git a/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/vcs/bazaar.py b/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/vcs/bazaar.py
deleted file mode 100755
index 3b6ea8f0..00000000
--- a/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/vcs/bazaar.py
+++ /dev/null
@@ -1,138 +0,0 @@
-import os
-import shutil
-import tempfile
-import re
-from pip import call_subprocess
-from pip.log import logger
-from pip.util import rmtree, display_path
-from pip.vcs import vcs, VersionControl
-from pip.download import path_to_url2
-
-
-class Bazaar(VersionControl):
- name = 'bzr'
- dirname = '.bzr'
- repo_name = 'branch'
- bundle_file = 'bzr-branch.txt'
- schemes = ('bzr', 'bzr+http', 'bzr+https', 'bzr+ssh', 'bzr+sftp', 'bzr+ftp')
- guide = ('# This was a Bazaar branch; to make it a branch again run:\n'
- 'bzr branch -r %(rev)s %(url)s .\n')
-
- def parse_vcs_bundle_file(self, content):
- url = rev = None
- for line in content.splitlines():
- if not line.strip() or line.strip().startswith('#'):
- continue
- match = re.search(r'^bzr\s*branch\s*-r\s*(\d*)', line)
- if match:
- rev = match.group(1).strip()
- url = line[match.end():].strip().split(None, 1)[0]
- if url and rev:
- return url, rev
- return None, None
-
- def unpack(self, location):
- """Get the bzr branch at the url to the destination location"""
- url, rev = self.get_url_rev()
- logger.notify('Checking out bzr repository %s to %s' % (url, location))
- logger.indent += 2
- try:
- if os.path.exists(location):
- os.rmdir(location)
- call_subprocess(
- [self.cmd, 'branch', url, location],
- filter_stdout=self._filter, show_stdout=False)
- finally:
- logger.indent -= 2
-
- def export(self, location):
- """Export the Bazaar repository at the url to the destination location"""
- temp_dir = tempfile.mkdtemp('-export', 'pip-')
- self.unpack(temp_dir)
- if os.path.exists(location):
- # Remove the location to make sure Bazaar can export it correctly
- rmtree(location)
- try:
- call_subprocess([self.cmd, 'export', location], cwd=temp_dir,
- filter_stdout=self._filter, show_stdout=False)
- finally:
- shutil.rmtree(temp_dir)
-
- def switch(self, dest, url, rev_options):
- call_subprocess([self.cmd, 'switch', url], cwd=dest)
-
- def update(self, dest, rev_options):
- call_subprocess(
- [self.cmd, 'pull', '-q'] + rev_options, cwd=dest)
-
- def obtain(self, dest):
- url, rev = self.get_url_rev()
- if rev:
- rev_options = ['-r', rev]
- rev_display = ' (to revision %s)' % rev
- else:
- rev_options = []
- rev_display = ''
- if self.check_destination(dest, url, rev_options, rev_display):
- logger.notify('Checking out %s%s to %s'
- % (url, rev_display, display_path(dest)))
- call_subprocess(
- [self.cmd, 'branch', '-q'] + rev_options + [url, dest])
-
- def get_url_rev(self):
-        # hotfix the URL scheme: after removing bzr+ from bzr+ssh://, re-add it
- url, rev = super(Bazaar, self).get_url_rev()
- if url.startswith('ssh://'):
- url = 'bzr+' + url
- return url, rev
-
- def get_url(self, location):
- urls = call_subprocess(
- [self.cmd, 'info'], show_stdout=False, cwd=location)
- for line in urls.splitlines():
- line = line.strip()
- for x in ('checkout of branch: ',
- 'parent branch: '):
- if line.startswith(x):
- repo = line.split(x)[1]
- if self._is_local_repository(repo):
- return path_to_url2(repo)
- return repo
- return None
-
- def get_revision(self, location):
- revision = call_subprocess(
- [self.cmd, 'revno'], show_stdout=False, cwd=location)
- return revision.splitlines()[-1]
-
- def get_tag_revs(self, location):
- tags = call_subprocess(
- [self.cmd, 'tags'], show_stdout=False, cwd=location)
- tag_revs = []
- for line in tags.splitlines():
- tags_match = re.search(r'([.\w-]+)\s*(.*)$', line)
- if tags_match:
- tag = tags_match.group(1)
- rev = tags_match.group(2)
- tag_revs.append((rev.strip(), tag.strip()))
- return dict(tag_revs)
-
- def get_src_requirement(self, dist, location, find_tags):
-        repo = self.get_url(location)
-        if not repo:
-            return None
-        if not repo.lower().startswith('bzr:'):
-            repo = 'bzr+' + repo
-        egg_project_name = dist.egg_name().split('-', 1)[0]
- current_rev = self.get_revision(location)
- tag_revs = self.get_tag_revs(location)
-
- if current_rev in tag_revs:
- # It's a tag
- full_egg_name = '%s-%s' % (egg_project_name, tag_revs[current_rev])
- else:
- full_egg_name = '%s-dev_r%s' % (dist.egg_name(), current_rev)
- return '%s@%s#egg=%s' % (repo, current_rev, full_egg_name)
-
-
-vcs.register(Bazaar)
diff --git a/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/vcs/git.py b/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/vcs/git.py
deleted file mode 100755
index 0701e49e..00000000
--- a/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/vcs/git.py
+++ /dev/null
@@ -1,204 +0,0 @@
-import os
-import shutil
-import tempfile
-import re
-from pip import call_subprocess
-from pip.util import display_path
-from pip.vcs import vcs, VersionControl
-from pip.log import logger
-from urllib import url2pathname
-from urlparse import urlsplit, urlunsplit
-
-
-class Git(VersionControl):
- name = 'git'
- dirname = '.git'
- repo_name = 'clone'
- schemes = ('git', 'git+http', 'git+ssh', 'git+git', 'git+file')
- bundle_file = 'git-clone.txt'
- guide = ('# This was a Git repo; to make it a repo again run:\n'
- 'git init\ngit remote add origin %(url)s -f\ngit checkout %(rev)s\n')
-
- def __init__(self, url=None, *args, **kwargs):
-
- # Works around an apparent Git bug
- # (see http://article.gmane.org/gmane.comp.version-control.git/146500)
- if url:
- scheme, netloc, path, query, fragment = urlsplit(url)
- if scheme.endswith('file'):
- initial_slashes = path[:-len(path.lstrip('/'))]
- newpath = initial_slashes + url2pathname(path).replace('\\', '/').lstrip('/')
-                after_plus = scheme.find('+') + 1
-                url = scheme[:after_plus] + urlunsplit((scheme[after_plus:], netloc, newpath, query, fragment))
-
- super(Git, self).__init__(url, *args, **kwargs)
-
- def parse_vcs_bundle_file(self, content):
- url = rev = None
- for line in content.splitlines():
- if not line.strip() or line.strip().startswith('#'):
- continue
- url_match = re.search(r'git\s*remote\s*add\s*origin(.*)\s*-f', line)
- if url_match:
- url = url_match.group(1).strip()
- rev_match = re.search(r'^git\s*checkout\s*-q\s*(.*)\s*', line)
- if rev_match:
- rev = rev_match.group(1).strip()
- if url and rev:
- return url, rev
- return None, None
-
- def unpack(self, location):
- """Clone the Git repository at the url to the destination location"""
- url, rev = self.get_url_rev()
- logger.notify('Cloning Git repository %s to %s' % (url, location))
- logger.indent += 2
- try:
- if os.path.exists(location):
- os.rmdir(location)
- call_subprocess(
- [self.cmd, 'clone', url, location],
- filter_stdout=self._filter, show_stdout=False)
- finally:
- logger.indent -= 2
-
- def export(self, location):
- """Export the Git repository at the url to the destination location"""
- temp_dir = tempfile.mkdtemp('-export', 'pip-')
- self.unpack(temp_dir)
- try:
- if not location.endswith('/'):
- location = location + '/'
- call_subprocess(
- [self.cmd, 'checkout-index', '-a', '-f', '--prefix', location],
- filter_stdout=self._filter, show_stdout=False, cwd=temp_dir)
- finally:
- shutil.rmtree(temp_dir)
-
- def check_rev_options(self, rev, dest, rev_options):
- """Check the revision options before checkout to compensate that tags
- and branches may need origin/ as a prefix.
- Returns the SHA1 of the branch or tag if found.
- """
- revisions = self.get_tag_revs(dest)
- revisions.update(self.get_branch_revs(dest))
- inverse_revisions = dict((v, k) for k, v in revisions.iteritems())
- # Check if rev is a branch name
- origin_rev = 'origin/%s' % rev
- if origin_rev in inverse_revisions:
- return [inverse_revisions[origin_rev]]
- elif rev in inverse_revisions:
- return [inverse_revisions[rev]]
- else:
- logger.warn("Could not find a tag or branch '%s', assuming commit." % rev)
- return rev_options
-
- def switch(self, dest, url, rev_options):
- call_subprocess(
- [self.cmd, 'config', 'remote.origin.url', url], cwd=dest)
- call_subprocess(
- [self.cmd, 'checkout', '-q'] + rev_options, cwd=dest)
-
- def update(self, dest, rev_options):
- call_subprocess([self.cmd, 'pull', '-q'], cwd=dest)
- call_subprocess(
- [self.cmd, 'checkout', '-q', '-f'] + rev_options, cwd=dest)
-
- def obtain(self, dest):
- url, rev = self.get_url_rev()
- if rev:
- rev_options = [rev]
- rev_display = ' (to %s)' % rev
- else:
- rev_options = ['master']
- rev_display = ''
- if self.check_destination(dest, url, rev_options, rev_display):
- logger.notify('Cloning %s%s to %s' % (url, rev_display, display_path(dest)))
- call_subprocess([self.cmd, 'clone', '-q', url, dest])
- if rev:
- rev_options = self.check_rev_options(rev, dest, rev_options)
- # Only do a checkout if rev_options differs from HEAD
- if not self.get_revision(dest).startswith(rev_options[0]):
- call_subprocess([self.cmd, 'checkout', '-q'] + rev_options, cwd=dest)
-
- def get_url(self, location):
- url = call_subprocess(
- [self.cmd, 'config', 'remote.origin.url'],
- show_stdout=False, cwd=location)
- return url.strip()
-
- def get_revision(self, location):
- current_rev = call_subprocess(
- [self.cmd, 'rev-parse', 'HEAD'], show_stdout=False, cwd=location)
- return current_rev.strip()
-
- def get_tag_revs(self, location):
- tags = call_subprocess(
- [self.cmd, 'tag', '-l'],
- show_stdout=False, raise_on_returncode=False, cwd=location)
- tag_revs = []
- for line in tags.splitlines():
- tag = line.strip()
- rev = call_subprocess(
- [self.cmd, 'rev-parse', tag], show_stdout=False, cwd=location)
- tag_revs.append((rev.strip(), tag))
- tag_revs = dict(tag_revs)
- return tag_revs
-
- def get_branch_revs(self, location):
- branches = call_subprocess(
- [self.cmd, 'branch', '-r'], show_stdout=False, cwd=location)
- branch_revs = []
- for line in branches.splitlines():
- line = line.split('->')[0].strip()
- branch = "".join([b for b in line.split() if b != '*'])
- rev = call_subprocess(
- [self.cmd, 'rev-parse', branch], show_stdout=False, cwd=location)
- branch_revs.append((rev.strip(), branch))
- branch_revs = dict(branch_revs)
- return branch_revs
-
- def get_src_requirement(self, dist, location, find_tags):
-        repo = self.get_url(location)
-        if not repo:
-            return None
-        if not repo.lower().startswith('git:'):
-            repo = 'git+' + repo
-        egg_project_name = dist.egg_name().split('-', 1)[0]
- current_rev = self.get_revision(location)
- tag_revs = self.get_tag_revs(location)
- branch_revs = self.get_branch_revs(location)
-
- if current_rev in tag_revs:
- # It's a tag
- full_egg_name = '%s-%s' % (egg_project_name, tag_revs[current_rev])
- elif (current_rev in branch_revs and
- branch_revs[current_rev] != 'origin/master'):
- # It's the head of a branch
- full_egg_name = '%s-%s' % (dist.egg_name(),
- branch_revs[current_rev].replace('origin/', ''))
- else:
- full_egg_name = '%s-dev' % dist.egg_name()
-
- return '%s@%s#egg=%s' % (repo, current_rev, full_egg_name)
-
- def get_url_rev(self):
- """
- Prefixes stub URLs like 'user@hostname:user/repo.git' with 'ssh://'.
-        That's required because although they use SSH they sometimes don't
-        work with an ssh:// scheme (e.g. GitHub). But we need a scheme for
-        parsing, so we remove it again afterwards and return the URL as a stub.
- """
-        if '://' not in self.url:
-            assert 'file:' not in self.url
- self.url = self.url.replace('git+', 'git+ssh://')
- url, rev = super(Git, self).get_url_rev()
- url = url.replace('ssh://', '')
- else:
- url, rev = super(Git, self).get_url_rev()
-
- return url, rev
-
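Tracing the stub handling above with a GitHub-style URL (a worked example; the repository is made up). Note that get_url_rev mutates self.url as a side effect:

    g = Git('git+git@github.com:user/repo.git@v1.0')
    print g.get_url_rev()  # ('git@github.com:user/repo.git', 'v1.0')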
-
-vcs.register(Git)
diff --git a/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/vcs/mercurial.py b/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/vcs/mercurial.py
deleted file mode 100755
index 70c8c833..00000000
--- a/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/vcs/mercurial.py
+++ /dev/null
@@ -1,162 +0,0 @@
-import os
-import shutil
-import tempfile
-import re
-import ConfigParser
-from pip import call_subprocess
-from pip.util import display_path
-from pip.log import logger
-from pip.vcs import vcs, VersionControl
-from pip.download import path_to_url2
-
-
-class Mercurial(VersionControl):
- name = 'hg'
- dirname = '.hg'
- repo_name = 'clone'
- schemes = ('hg', 'hg+http', 'hg+https', 'hg+ssh', 'hg+static-http')
- bundle_file = 'hg-clone.txt'
- guide = ('# This was a Mercurial repo; to make it a repo again run:\n'
- 'hg init\nhg pull %(url)s\nhg update -r %(rev)s\n')
-
- def parse_vcs_bundle_file(self, content):
- url = rev = None
- for line in content.splitlines():
- if not line.strip() or line.strip().startswith('#'):
- continue
- url_match = re.search(r'hg\s*pull\s*(.*)\s*', line)
- if url_match:
- url = url_match.group(1).strip()
- rev_match = re.search(r'^hg\s*update\s*-r\s*(.*)\s*', line)
- if rev_match:
- rev = rev_match.group(1).strip()
- if url and rev:
- return url, rev
- return None, None
-
- def unpack(self, location):
- """Clone the Hg repository at the url to the destination location"""
- url, rev = self.get_url_rev()
- logger.notify('Cloning Mercurial repository %s to %s' % (url, location))
- logger.indent += 2
- try:
- if os.path.exists(location):
- os.rmdir(location)
- call_subprocess(
- [self.cmd, 'clone', url, location],
- filter_stdout=self._filter, show_stdout=False)
- finally:
- logger.indent -= 2
-
- def export(self, location):
- """Export the Hg repository at the url to the destination location"""
- temp_dir = tempfile.mkdtemp('-export', 'pip-')
- self.unpack(temp_dir)
- try:
- call_subprocess(
- [self.cmd, 'archive', location],
- filter_stdout=self._filter, show_stdout=False, cwd=temp_dir)
- finally:
- shutil.rmtree(temp_dir)
-
- def switch(self, dest, url, rev_options):
- repo_config = os.path.join(dest, self.dirname, 'hgrc')
- config = ConfigParser.SafeConfigParser()
- try:
- config.read(repo_config)
- config.set('paths', 'default', url)
- config_file = open(repo_config, 'w')
- config.write(config_file)
- config_file.close()
- except (OSError, ConfigParser.NoSectionError), e:
- logger.warn(
- 'Could not switch Mercurial repository to %s: %s'
- % (url, e))
- else:
- call_subprocess([self.cmd, 'update', '-q'] + rev_options, cwd=dest)
-
- def update(self, dest, rev_options):
- call_subprocess([self.cmd, 'pull', '-q'], cwd=dest)
- call_subprocess(
- [self.cmd, 'update', '-q'] + rev_options, cwd=dest)
-
- def obtain(self, dest):
- url, rev = self.get_url_rev()
- if rev:
- rev_options = [rev]
- rev_display = ' (to revision %s)' % rev
- else:
- rev_options = []
- rev_display = ''
- if self.check_destination(dest, url, rev_options, rev_display):
- logger.notify('Cloning hg %s%s to %s'
- % (url, rev_display, display_path(dest)))
- call_subprocess([self.cmd, 'clone', '--noupdate', '-q', url, dest])
- call_subprocess([self.cmd, 'update', '-q'] + rev_options, cwd=dest)
-
- def get_url(self, location):
- url = call_subprocess(
- [self.cmd, 'showconfig', 'paths.default'],
- show_stdout=False, cwd=location).strip()
- if self._is_local_repository(url):
- url = path_to_url2(url)
- return url.strip()
-
- def get_tag_revs(self, location):
- tags = call_subprocess(
- [self.cmd, 'tags'], show_stdout=False, cwd=location)
- tag_revs = []
- for line in tags.splitlines():
- tags_match = re.search(r'([\w\d\.-]+)\s*([\d]+):.*$', line)
- if tags_match:
- tag = tags_match.group(1)
- rev = tags_match.group(2)
- tag_revs.append((rev.strip(), tag.strip()))
- return dict(tag_revs)
-
- def get_branch_revs(self, location):
- branches = call_subprocess(
- [self.cmd, 'branches'], show_stdout=False, cwd=location)
- branch_revs = []
- for line in branches.splitlines():
- branches_match = re.search(r'([\w\d\.-]+)\s*([\d]+):.*$', line)
- if branches_match:
- branch = branches_match.group(1)
- rev = branches_match.group(2)
- branch_revs.append((rev.strip(), branch.strip()))
- return dict(branch_revs)
-
- def get_revision(self, location):
- current_revision = call_subprocess(
- [self.cmd, 'parents', '--template={rev}'],
- show_stdout=False, cwd=location).strip()
- return current_revision
-
- def get_revision_hash(self, location):
- current_rev_hash = call_subprocess(
- [self.cmd, 'parents', '--template={node}'],
- show_stdout=False, cwd=location).strip()
- return current_rev_hash
-
- def get_src_requirement(self, dist, location, find_tags):
-        repo = self.get_url(location)
-        if not repo:
-            return None
-        if not repo.lower().startswith('hg:'):
-            repo = 'hg+' + repo
-        egg_project_name = dist.egg_name().split('-', 1)[0]
- current_rev = self.get_revision(location)
- current_rev_hash = self.get_revision_hash(location)
- tag_revs = self.get_tag_revs(location)
- branch_revs = self.get_branch_revs(location)
- if current_rev in tag_revs:
- # It's a tag
- full_egg_name = '%s-%s' % (egg_project_name, tag_revs[current_rev])
- elif current_rev in branch_revs:
- # It's the tip of a branch
- full_egg_name = '%s-%s' % (dist.egg_name(), branch_revs[current_rev])
- else:
- full_egg_name = '%s-dev' % dist.egg_name()
- return '%s@%s#egg=%s' % (repo, current_rev_hash, full_egg_name)
-
-vcs.register(Mercurial)
diff --git a/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/vcs/subversion.py b/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/vcs/subversion.py
deleted file mode 100755
index 85715d97..00000000
--- a/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/vcs/subversion.py
+++ /dev/null
@@ -1,260 +0,0 @@
-import os
-import re
-from pip import call_subprocess
-from pip.index import Link
-from pip.util import rmtree, display_path
-from pip.log import logger
-from pip.vcs import vcs, VersionControl
-
-_svn_xml_url_re = re.compile('url="([^"]+)"')
-_svn_rev_re = re.compile(r'committed-rev="(\d+)"')
-_svn_url_re = re.compile(r'URL: (.+)')
-_svn_revision_re = re.compile(r'Revision: (.+)')
-
-
-class Subversion(VersionControl):
- name = 'svn'
- dirname = '.svn'
- repo_name = 'checkout'
- schemes = ('svn', 'svn+ssh', 'svn+http', 'svn+https')
- bundle_file = 'svn-checkout.txt'
- guide = ('# This was an svn checkout; to make it a checkout again run:\n'
- 'svn checkout --force -r %(rev)s %(url)s .\n')
-
- def get_info(self, location):
- """Returns (url, revision), where both are strings"""
- assert not location.rstrip('/').endswith(self.dirname), 'Bad directory: %s' % location
- output = call_subprocess(
- [self.cmd, 'info', location], show_stdout=False, extra_environ={'LANG': 'C'})
- match = _svn_url_re.search(output)
- if not match:
- logger.warn('Cannot determine URL of svn checkout %s' % display_path(location))
- logger.info('Output that cannot be parsed: \n%s' % output)
- return None, None
- url = match.group(1).strip()
- match = _svn_revision_re.search(output)
- if not match:
- logger.warn('Cannot determine revision of svn checkout %s' % display_path(location))
- logger.info('Output that cannot be parsed: \n%s' % output)
- return url, None
- return url, match.group(1)
-
- def parse_vcs_bundle_file(self, content):
- for line in content.splitlines():
- if not line.strip() or line.strip().startswith('#'):
- continue
- match = re.search(r'^-r\s*([^ ])?', line)
- if not match:
- return None, None
- rev = match.group(1)
- rest = line[match.end():].strip().split(None, 1)[0]
- return rest, rev
- return None, None
-
- def unpack(self, location):
- """Check out the svn repository at the url to the destination location"""
- url, rev = self.get_url_rev()
- logger.notify('Checking out svn repository %s to %s' % (url, location))
- logger.indent += 2
- try:
- if os.path.exists(location):
- # Subversion doesn't like to check out over an existing directory
- # --force fixes this, but was only added in svn 1.5
- rmtree(location)
- call_subprocess(
- [self.cmd, 'checkout', url, location],
- filter_stdout=self._filter, show_stdout=False)
- finally:
- logger.indent -= 2
-
- def export(self, location):
- """Export the svn repository at the url to the destination location"""
- url, rev = self.get_url_rev()
- logger.notify('Exporting svn repository %s to %s' % (url, location))
- logger.indent += 2
- try:
- if os.path.exists(location):
- # Subversion doesn't like to check out over an existing directory
- # --force fixes this, but was only added in svn 1.5
- rmtree(location)
- call_subprocess(
- [self.cmd, 'export', url, location],
- filter_stdout=self._filter, show_stdout=False)
- finally:
- logger.indent -= 2
-
- def switch(self, dest, url, rev_options):
- call_subprocess(
- [self.cmd, 'switch'] + rev_options + [url, dest])
-
- def update(self, dest, rev_options):
- call_subprocess(
- [self.cmd, 'update'] + rev_options + [dest])
-
- def obtain(self, dest):
- url, rev = self.get_url_rev()
- if rev:
- rev_options = ['-r', rev]
- rev_display = ' (to revision %s)' % rev
- else:
- rev_options = []
- rev_display = ''
- if self.check_destination(dest, url, rev_options, rev_display):
- logger.notify('Checking out %s%s to %s'
- % (url, rev_display, display_path(dest)))
- call_subprocess(
- [self.cmd, 'checkout', '-q'] + rev_options + [url, dest])
-
- def get_location(self, dist, dependency_links):
- for url in dependency_links:
- egg_fragment = Link(url).egg_fragment
- if not egg_fragment:
- continue
- if '-' in egg_fragment:
- ## FIXME: will this work when a package has - in the name?
- key = '-'.join(egg_fragment.split('-')[:-1]).lower()
- else:
- key = egg_fragment
- if key == dist.key:
- return url.split('#', 1)[0]
- return None
-
- def get_revision(self, location):
- """
- Return the maximum revision for all files under a given location
- """
- # Note: taken from setuptools.command.egg_info
- revision = 0
-
- for base, dirs, files in os.walk(location):
- if self.dirname not in dirs:
- dirs[:] = []
- continue # no sense walking uncontrolled subdirs
- dirs.remove(self.dirname)
- entries_fn = os.path.join(base, self.dirname, 'entries')
- if not os.path.exists(entries_fn):
- ## FIXME: should we warn?
- continue
- f = open(entries_fn)
- data = f.read()
- f.close()
-
- if data.startswith('8') or data.startswith('9') or data.startswith('10'):
- data = map(str.splitlines, data.split('\n\x0c\n'))
- del data[0][0] # get rid of the '8'
- dirurl = data[0][3]
- revs = [int(d[9]) for d in data if len(d)>9 and d[9]]+[0]
- if revs:
- localrev = max(revs)
- else:
- localrev = 0
- elif data.startswith('<?xml'):
- dirurl = _svn_xml_url_re.search(data).group(1) # get repository URL
- revs = [int(m.group(1)) for m in _svn_rev_re.finditer(data)]+[0]
- if revs:
- localrev = max(revs)
- else:
- localrev = 0
- else:
- logger.warn("Unrecognized .svn/entries format; skipping %s", base)
- dirs[:] = []
- continue
- if base == location:
- base_url = dirurl+'/' # save the root url
- elif not dirurl.startswith(base_url):
- dirs[:] = []
- continue # not part of the same svn tree, skip it
- revision = max(revision, localrev)
- return revision
-
- def get_url_rev(self):
-        # hotfix the URL scheme: after removing svn+ from svn+ssh://, re-add it
- url, rev = super(Subversion, self).get_url_rev()
- if url.startswith('ssh://'):
- url = 'svn+' + url
- return url, rev
-
- def get_url(self, location):
- # In cases where the source is in a subdirectory, not alongside setup.py
- # we have to look up in the location until we find a real setup.py
- orig_location = location
- while not os.path.exists(os.path.join(location, 'setup.py')):
- last_location = location
- location = os.path.dirname(location)
- if location == last_location:
- # We've traversed up to the root of the filesystem without finding setup.py
- logger.warn("Could not find setup.py for directory %s (tried all parent directories)"
- % orig_location)
- return None
- f = open(os.path.join(location, self.dirname, 'entries'))
- data = f.read()
- f.close()
- if data.startswith('8') or data.startswith('9') or data.startswith('10'):
- data = map(str.splitlines, data.split('\n\x0c\n'))
- del data[0][0] # get rid of the '8'
- return data[0][3]
- elif data.startswith('<?xml'):
- match = _svn_xml_url_re.search(data)
- if not match:
- raise ValueError('Badly formatted data: %r' % data)
- return match.group(1) # get repository URL
- else:
- logger.warn("Unrecognized .svn/entries format in %s" % location)
- # Or raise exception?
- return None
-
- def get_tag_revs(self, svn_tag_url):
- stdout = call_subprocess(
- [self.cmd, 'ls', '-v', svn_tag_url], show_stdout=False)
- results = []
- for line in stdout.splitlines():
- parts = line.split()
- rev = int(parts[0])
- tag = parts[-1].strip('/')
- results.append((tag, rev))
- return results
-
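The loop above assumes `svn ls -v` lines of roughly this shape (a sketch with made-up values):

    line = '   4123 jdoe              Apr 02  2010 0.6.1/'
    parts = line.split()
    print int(parts[0]), parts[-1].strip('/')  # 4123 0.6.1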
- def find_tag_match(self, rev, tag_revs):
- best_match_rev = None
- best_tag = None
- for tag, tag_rev in tag_revs:
- if (tag_rev > rev and
- (best_match_rev is None or best_match_rev > tag_rev)):
- # FIXME: Is best_match > tag_rev really possible?
- # or is it a sign something is wacky?
- best_match_rev = tag_rev
- best_tag = tag
- return best_tag
-
- def get_src_requirement(self, dist, location, find_tags=False):
- repo = self.get_url(location)
- if repo is None:
- return None
- parts = repo.split('/')
- ## FIXME: why not project name?
- egg_project_name = dist.egg_name().split('-', 1)[0]
- rev = self.get_revision(location)
- if parts[-2] in ('tags', 'tag'):
- # It's a tag, perfect!
- full_egg_name = '%s-%s' % (egg_project_name, parts[-1])
- elif parts[-2] in ('branches', 'branch'):
- # It's a branch :(
- full_egg_name = '%s-%s-r%s' % (dist.egg_name(), parts[-1], rev)
- elif parts[-1] == 'trunk':
- # Trunk :-/
- full_egg_name = '%s-dev_r%s' % (dist.egg_name(), rev)
- if find_tags:
- tag_url = '/'.join(parts[:-1]) + '/tags'
- tag_revs = self.get_tag_revs(tag_url)
- match = self.find_tag_match(rev, tag_revs)
- if match:
- logger.notify('trunk checkout %s seems to be equivalent to tag %s' % match)
- repo = '%s/%s' % (tag_url, match)
- full_egg_name = '%s-%s' % (egg_project_name, match)
- else:
- # Don't know what it is
- logger.warn('svn URL does not fit normal structure (tags/branches/trunk): %s' % repo)
- full_egg_name = '%s-dev_r%s' % (egg_project_name, rev)
- return 'svn+%s@%s#egg=%s' % (repo, rev, full_egg_name)
-
-vcs.register(Subversion)
diff --git a/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/venv.py b/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/venv.py
deleted file mode 100755
index 708abb05..00000000
--- a/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/venv.py
+++ /dev/null
@@ -1,53 +0,0 @@
-"""Tools for working with virtualenv environments"""
-
-import os
-import sys
-import subprocess
-from pip.exceptions import BadCommand
-from pip.log import logger
-
-
-def restart_in_venv(venv, base, site_packages, args):
- """
- Restart this script using the interpreter in the given virtual environment
- """
- if base and not os.path.isabs(venv) and not venv.startswith('~'):
- base = os.path.expanduser(base)
- # ensure we have an abs basepath at this point:
- # a relative one makes no sense (or does it?)
- if os.path.isabs(base):
- venv = os.path.join(base, venv)
-
- if venv.startswith('~'):
- venv = os.path.expanduser(venv)
-
- if not os.path.exists(venv):
- try:
- import virtualenv
- except ImportError:
- print 'The virtual environment does not exist: %s' % venv
- print 'and virtualenv is not installed, so a new environment cannot be created'
- sys.exit(3)
- print 'Creating new virtualenv environment in %s' % venv
- virtualenv.logger = logger
- logger.indent += 2
- virtualenv.create_environment(venv, site_packages=site_packages)
- if sys.platform == 'win32':
- python = os.path.join(venv, 'Scripts', 'python.exe')
- # check for bin directory which is used in buildouts
- if not os.path.exists(python):
- python = os.path.join(venv, 'bin', 'python.exe')
- else:
- python = os.path.join(venv, 'bin', 'python')
- if not os.path.exists(python):
- python = venv
- if not os.path.exists(python):
- raise BadCommand('Cannot find virtual environment interpreter at %s' % python)
- base = os.path.dirname(os.path.dirname(python))
- file = os.path.join(os.path.dirname(__file__), 'runner.py')
- if file.endswith('.pyc'):
- file = file[:-1]
- proc = subprocess.Popen(
- [python, file] + args + [base, '___VENV_RESTART___'])
- proc.wait()
- sys.exit(proc.returncode)
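A hypothetical invocation, e.g. from a command honoring pip's -E option (the environment name and arguments are made up; the call never returns, since it re-runs pip inside the venv and exits with its status):

    restart_in_venv('myenv', '~/.virtualenvs', site_packages=False,
                    args=['install', 'INITools'])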
diff --git a/lib/python2.7/site-packages/setuptools-0.6c11-py2.7.egg-info b/lib/python2.7/site-packages/setuptools-0.6c11-py2.7.egg-info
deleted file mode 100644
index 0e358148..00000000
--- a/lib/python2.7/site-packages/setuptools-0.6c11-py2.7.egg-info
+++ /dev/null
@@ -1,9 +0,0 @@
-Metadata-Version: 1.0
-Name: setuptools
-Version: 0.6c11
-Summary: xxxx
-Home-page: xxx
-Author: xxx
-Author-email: xxx
-License: xxx
-Description: xxx
diff --git a/lib/python2.7/site-packages/setuptools.pth b/lib/python2.7/site-packages/setuptools.pth
deleted file mode 100644
index 0f744252..00000000
--- a/lib/python2.7/site-packages/setuptools.pth
+++ /dev/null
@@ -1 +0,0 @@
-./distribute-0.6.14-py2.7.egg
diff --git a/lib/python2.7/site.py b/lib/python2.7/site.py
deleted file mode 100644
index a49cfc34..00000000
--- a/lib/python2.7/site.py
+++ /dev/null
@@ -1,713 +0,0 @@
-"""Append module search paths for third-party packages to sys.path.
-
-****************************************************************
-* This module is automatically imported during initialization. *
-****************************************************************
-
-In earlier versions of Python (up to 1.5a3), scripts or modules that
-needed to use site-specific modules would place ``import site''
-somewhere near the top of their code. Because of the automatic
-import, this is no longer necessary (but code that does it still
-works).
-
-This will append site-specific paths to the module search path. On
-Unix, it starts with sys.prefix and sys.exec_prefix (if different) and
-appends lib/python<version>/site-packages as well as lib/site-python.
-It also supports the Debian convention of
-lib/python<version>/dist-packages. On other platforms (mainly Mac and
-Windows), it uses just sys.prefix (and sys.exec_prefix, if different,
-but this is unlikely). The resulting directories, if they exist, are
-appended to sys.path, and also inspected for path configuration files.
-
-FOR DEBIAN, this sys.path is augmented with directories in /usr/local.
-Local addons go into /usr/local/lib/python<version>/site-packages
-(resp. /usr/local/lib/site-python), Debian addons install into
-/usr/{lib,share}/python<version>/dist-packages.
-
-A path configuration file is a file whose name has the form
-<package>.pth; its contents are additional directories (one per line)
-to be added to sys.path. Non-existing directories (or
-non-directories) are never added to sys.path; no directory is added to
-sys.path more than once. Blank lines and lines beginning with
-'#' are skipped. Lines starting with 'import' are executed.
-
-For example, suppose sys.prefix and sys.exec_prefix are set to
-/usr/local and there is a directory /usr/local/lib/python2.X/site-packages
-with three subdirectories, foo, bar and spam, and two path
-configuration files, foo.pth and bar.pth. Assume foo.pth contains the
-following:
-
- # foo package configuration
- foo
- bar
- bletch
-
-and bar.pth contains:
-
- # bar package configuration
- bar
-
-Then the following directories are added to sys.path, in this order:
-
- /usr/local/lib/python2.X/site-packages/bar
- /usr/local/lib/python2.X/site-packages/foo
-
-Note that bletch is omitted because it doesn't exist; bar precedes foo
-because bar.pth comes alphabetically before foo.pth; and spam is
-omitted because it is not mentioned in either path configuration file.
-
-After these path manipulations, an attempt is made to import a module
-named sitecustomize, which can perform arbitrary additional
-site-specific customizations. If this import fails with an
-ImportError exception, it is silently ignored.
-
-"""
-
-import sys
-import os
-import __builtin__
-try:
- set
-except NameError:
- from sets import Set as set
-
-# Prefixes for site-packages; add additional prefixes like /usr/local here
-PREFIXES = [sys.prefix, sys.exec_prefix]
-# Enable per user site-packages directory
-# set it to False to disable the feature or True to force the feature
-ENABLE_USER_SITE = None
-# for distutils.commands.install
-USER_SITE = None
-USER_BASE = None
-
-_is_pypy = hasattr(sys, 'pypy_version_info')
-_is_jython = sys.platform[:4] == 'java'
-if _is_jython:
- ModuleType = type(os)
-
-def makepath(*paths):
- dir = os.path.join(*paths)
- if _is_jython and (dir == '__classpath__' or
- dir.startswith('__pyclasspath__')):
- return dir, dir
- dir = os.path.abspath(dir)
- return dir, os.path.normcase(dir)
-
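For example, on a case-sensitive POSIX filesystem (a worked sketch):

    print makepath('/usr/lib', 'python2.7', '../python2.7/site-packages')
    # ('/usr/lib/python2.7/site-packages', '/usr/lib/python2.7/site-packages')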
-def abs__file__():
-    """Set each module's __file__ attribute to an absolute path"""
- for m in sys.modules.values():
- if ((_is_jython and not isinstance(m, ModuleType)) or
- hasattr(m, '__loader__')):
- # only modules need the abspath in Jython. and don't mess
- # with a PEP 302-supplied __file__
- continue
- f = getattr(m, '__file__', None)
- if f is None:
- continue
- m.__file__ = os.path.abspath(f)
-
-def removeduppaths():
- """ Remove duplicate entries from sys.path along with making them
- absolute"""
- # This ensures that the initial path provided by the interpreter contains
- # only absolute pathnames, even if we're running from the build directory.
- L = []
- known_paths = set()
- for dir in sys.path:
- # Filter out duplicate paths (on case-insensitive file systems also
- # if they only differ in case); turn relative paths into absolute
- # paths.
- dir, dircase = makepath(dir)
- if not dircase in known_paths:
- L.append(dir)
- known_paths.add(dircase)
- sys.path[:] = L
- return known_paths
-
-# XXX This should not be part of site.py, since it is needed even when
-# using the -S option for Python. See http://www.python.org/sf/586680
-def addbuilddir():
- """Append ./build/lib.<platform> in case we're running in the build dir
- (especially for Guido :-)"""
- from distutils.util import get_platform
- s = "build/lib.%s-%.3s" % (get_platform(), sys.version)
- if hasattr(sys, 'gettotalrefcount'):
- s += '-pydebug'
- s = os.path.join(os.path.dirname(sys.path[-1]), s)
- sys.path.append(s)
-
-def _init_pathinfo():
- """Return a set containing all existing directory entries from sys.path"""
- d = set()
- for dir in sys.path:
- try:
- if os.path.isdir(dir):
- dir, dircase = makepath(dir)
- d.add(dircase)
- except TypeError:
- continue
- return d
-
-def addpackage(sitedir, name, known_paths):
-    """Add a new path to known_paths by combining sitedir and 'name', or
-    execute the line itself if it starts with 'import'"""
- if known_paths is None:
- _init_pathinfo()
- reset = 1
- else:
- reset = 0
- fullname = os.path.join(sitedir, name)
- try:
- f = open(fullname, "rU")
- except IOError:
- return
- try:
- for line in f:
- if line.startswith("#"):
- continue
- if line.startswith("import"):
- exec line
- continue
- line = line.rstrip()
- dir, dircase = makepath(sitedir, line)
- if not dircase in known_paths and os.path.exists(dir):
- sys.path.append(dir)
- known_paths.add(dircase)
- finally:
- f.close()
- if reset:
- known_paths = None
- return known_paths
-
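Given that handling, a hypothetical example.pth dropped into a site directory can mix both kinds of lines: a directory to append to sys.path (if it exists) and an import line that gets exec'd:

    # contents of a hypothetical example.pth
    some-package-dir
    import os; os.environ.setdefault('SEEN_EXAMPLE_PTH', '1')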
-def addsitedir(sitedir, known_paths=None):
- """Add 'sitedir' argument to sys.path if missing and handle .pth files in
- 'sitedir'"""
- if known_paths is None:
- known_paths = _init_pathinfo()
- reset = 1
- else:
- reset = 0
- sitedir, sitedircase = makepath(sitedir)
- if not sitedircase in known_paths:
- sys.path.append(sitedir) # Add path component
- try:
- names = os.listdir(sitedir)
- except os.error:
- return
- names.sort()
- for name in names:
- if name.endswith(os.extsep + "pth"):
- addpackage(sitedir, name, known_paths)
- if reset:
- known_paths = None
- return known_paths
-
-def addsitepackages(known_paths, sys_prefix=sys.prefix, exec_prefix=sys.exec_prefix):
- """Add site-packages (and possibly site-python) to sys.path"""
- prefixes = [os.path.join(sys_prefix, "local"), sys_prefix]
- if exec_prefix != sys_prefix:
- prefixes.append(os.path.join(exec_prefix, "local"))
-
- for prefix in prefixes:
- if prefix:
- if sys.platform in ('os2emx', 'riscos') or _is_jython:
- sitedirs = [os.path.join(prefix, "Lib", "site-packages")]
- elif _is_pypy:
- sitedirs = [os.path.join(prefix, 'site-packages')]
- elif sys.platform == 'darwin' and prefix == sys_prefix:
-
- if prefix.startswith("/System/Library/Frameworks/"): # Apple's Python
-
- sitedirs = [os.path.join("/Library/Python", sys.version[:3], "site-packages"),
- os.path.join(prefix, "Extras", "lib", "python")]
-
- else: # any other Python distros on OSX work this way
- sitedirs = [os.path.join(prefix, "lib",
- "python" + sys.version[:3], "site-packages")]
-
- elif os.sep == '/':
- sitedirs = [os.path.join(prefix,
- "lib",
- "python" + sys.version[:3],
- "site-packages"),
- os.path.join(prefix, "lib", "site-python"),
- os.path.join(prefix, "python" + sys.version[:3], "lib-dynload")]
- lib64_dir = os.path.join(prefix, "lib64", "python" + sys.version[:3], "site-packages")
- if (os.path.exists(lib64_dir) and
- os.path.realpath(lib64_dir) not in [os.path.realpath(p) for p in sitedirs]):
- sitedirs.append(lib64_dir)
- try:
- # sys.getobjects only available in --with-pydebug build
- sys.getobjects
- sitedirs.insert(0, os.path.join(sitedirs[0], 'debug'))
- except AttributeError:
- pass
- # Debian-specific dist-packages directories:
- sitedirs.append(os.path.join(prefix, "lib",
- "python" + sys.version[:3],
- "dist-packages"))
- sitedirs.append(os.path.join(prefix, "local/lib",
- "python" + sys.version[:3],
- "dist-packages"))
- sitedirs.append(os.path.join(prefix, "lib", "dist-python"))
- else:
- sitedirs = [prefix, os.path.join(prefix, "lib", "site-packages")]
- if sys.platform == 'darwin':
- # for framework builds *only* we add the standard Apple
- # locations. Currently only per-user, but /Library and
- # /Network/Library could be added too
- if 'Python.framework' in prefix:
- home = os.environ.get('HOME')
- if home:
- sitedirs.append(
- os.path.join(home,
- 'Library',
- 'Python',
- sys.version[:3],
- 'site-packages'))
- for sitedir in sitedirs:
- if os.path.isdir(sitedir):
- addsitedir(sitedir, known_paths)
- return None
-
-def check_enableusersite():
- """Check if user site directory is safe for inclusion
-
- The function tests for the command line flag (including environment var),
- process uid/gid equal to effective uid/gid.
-
- None: Disabled for security reasons
- False: Disabled by user (command line option)
- True: Safe and enabled
- """
- if hasattr(sys, 'flags') and getattr(sys.flags, 'no_user_site', False):
- return False
-
- if hasattr(os, "getuid") and hasattr(os, "geteuid"):
- # check process uid == effective uid
- if os.geteuid() != os.getuid():
- return None
- if hasattr(os, "getgid") and hasattr(os, "getegid"):
- # check process gid == effective gid
- if os.getegid() != os.getgid():
- return None
-
- return True
-
-def addusersitepackages(known_paths):
- """Add a per user site-package to sys.path
-
-    Each user has their own python directory with site-packages in the
-    home directory.
-
- USER_BASE is the root directory for all Python versions
-
- USER_SITE is the user specific site-packages directory
-
- USER_SITE/.. can be used for data.
- """
- global USER_BASE, USER_SITE, ENABLE_USER_SITE
- env_base = os.environ.get("PYTHONUSERBASE", None)
-
- def joinuser(*args):
- return os.path.expanduser(os.path.join(*args))
-
- #if sys.platform in ('os2emx', 'riscos'):
- # # Don't know what to put here
- # USER_BASE = ''
- # USER_SITE = ''
- if os.name == "nt":
- base = os.environ.get("APPDATA") or "~"
- if env_base:
- USER_BASE = env_base
- else:
- USER_BASE = joinuser(base, "Python")
- USER_SITE = os.path.join(USER_BASE,
- "Python" + sys.version[0] + sys.version[2],
- "site-packages")
- else:
- if env_base:
- USER_BASE = env_base
- else:
- USER_BASE = joinuser("~", ".local")
- USER_SITE = os.path.join(USER_BASE, "lib",
- "python" + sys.version[:3],
- "site-packages")
-
- if ENABLE_USER_SITE and os.path.isdir(USER_SITE):
- addsitedir(USER_SITE, known_paths)
- if ENABLE_USER_SITE:
- for dist_libdir in ("lib", "local/lib"):
- user_site = os.path.join(USER_BASE, dist_libdir,
- "python" + sys.version[:3],
- "dist-packages")
- if os.path.isdir(user_site):
- addsitedir(user_site, known_paths)
- return known_paths
-
-
-
-def setBEGINLIBPATH():
- """The OS/2 EMX port has optional extension modules that do double duty
- as DLLs (and must use the .DLL file extension) for other extensions.
- The library search path needs to be amended so these will be found
- during module import. Use BEGINLIBPATH so that these are at the start
- of the library search path.
-
- """
- dllpath = os.path.join(sys.prefix, "Lib", "lib-dynload")
- libpath = os.environ['BEGINLIBPATH'].split(';')
- if libpath[-1]:
- libpath.append(dllpath)
- else:
- libpath[-1] = dllpath
- os.environ['BEGINLIBPATH'] = ';'.join(libpath)
-
-
-def setquit():
- """Define new built-ins 'quit' and 'exit'.
- These are simply strings that display a hint on how to exit.
-
- """
- if os.sep == ':':
- eof = 'Cmd-Q'
- elif os.sep == '\\':
- eof = 'Ctrl-Z plus Return'
- else:
- eof = 'Ctrl-D (i.e. EOF)'
-
- class Quitter(object):
- def __init__(self, name):
- self.name = name
- def __repr__(self):
- return 'Use %s() or %s to exit' % (self.name, eof)
- def __call__(self, code=None):
- # Shells like IDLE catch the SystemExit, but listen when their
- # stdin wrapper is closed.
- try:
- sys.stdin.close()
- except:
- pass
- raise SystemExit(code)
- __builtin__.quit = Quitter('quit')
- __builtin__.exit = Quitter('exit')
-
-
-class _Printer(object):
- """interactive prompt objects for printing the license text, a list of
- contributors and the copyright notice."""
-
- MAXLINES = 23
-
- def __init__(self, name, data, files=(), dirs=()):
- self.__name = name
- self.__data = data
- self.__files = files
- self.__dirs = dirs
- self.__lines = None
-
- def __setup(self):
- if self.__lines:
- return
- data = None
- for dir in self.__dirs:
- for filename in self.__files:
- filename = os.path.join(dir, filename)
- try:
- fp = file(filename, "rU")
- data = fp.read()
- fp.close()
- break
- except IOError:
- pass
- if data:
- break
- if not data:
- data = self.__data
- self.__lines = data.split('\n')
- self.__linecnt = len(self.__lines)
-
- def __repr__(self):
- self.__setup()
- if len(self.__lines) <= self.MAXLINES:
- return "\n".join(self.__lines)
- else:
- return "Type %s() to see the full %s text" % ((self.__name,)*2)
-
- def __call__(self):
- self.__setup()
- prompt = 'Hit Return for more, or q (and Return) to quit: '
- lineno = 0
- while 1:
- try:
- for i in range(lineno, lineno + self.MAXLINES):
- print self.__lines[i]
- except IndexError:
- break
- else:
- lineno += self.MAXLINES
- key = None
- while key is None:
- key = raw_input(prompt)
- if key not in ('', 'q'):
- key = None
- if key == 'q':
- break
-
-def setcopyright():
- """Set 'copyright' and 'credits' in __builtin__"""
- __builtin__.copyright = _Printer("copyright", sys.copyright)
- if _is_jython:
- __builtin__.credits = _Printer(
- "credits",
- "Jython is maintained by the Jython developers (www.jython.org).")
- elif _is_pypy:
- __builtin__.credits = _Printer(
- "credits",
- "PyPy is maintained by the PyPy developers: http://codespeak.net/pypy")
- else:
- __builtin__.credits = _Printer("credits", """\
- Thanks to CWI, CNRI, BeOpen.com, Zope Corporation and a cast of thousands
- for supporting Python development. See www.python.org for more information.""")
- here = os.path.dirname(os.__file__)
- __builtin__.license = _Printer(
- "license", "See http://www.python.org/%.3s/license.html" % sys.version,
- ["LICENSE.txt", "LICENSE"],
- [os.path.join(here, os.pardir), here, os.curdir])
-
-
-class _Helper(object):
- """Define the built-in 'help'.
- This is a wrapper around pydoc.help (with a twist).
-
- """
-
- def __repr__(self):
- return "Type help() for interactive help, " \
- "or help(object) for help about object."
- def __call__(self, *args, **kwds):
- import pydoc
- return pydoc.help(*args, **kwds)
-
-def sethelper():
- __builtin__.help = _Helper()
-
-def aliasmbcs():
- """On Windows, some default encodings are not provided by Python,
- while they are always available as "mbcs" in each locale. Make
- them usable by aliasing to "mbcs" in such a case."""
- if sys.platform == 'win32':
- import locale, codecs
- enc = locale.getdefaultlocale()[1]
- if enc.startswith('cp'): # "cp***" ?
- try:
- codecs.lookup(enc)
- except LookupError:
- import encodings
- encodings._cache[enc] = encodings._unknown
- encodings.aliases.aliases[enc] = 'mbcs'
-
-def setencoding():
- """Set the string encoding used by the Unicode implementation. The
- default is 'ascii', but if you're willing to experiment, you can
- change this."""
- encoding = "ascii" # Default value set by _PyUnicode_Init()
- if 0:
- # Enable to support locale aware default string encodings.
- import locale
- loc = locale.getdefaultlocale()
- if loc[1]:
- encoding = loc[1]
- if 0:
- # Enable to switch off string to Unicode coercion and implicit
- # Unicode to string conversion.
- encoding = "undefined"
- if encoding != "ascii":
- # On Non-Unicode builds this will raise an AttributeError...
- sys.setdefaultencoding(encoding) # Needs Python Unicode build !
-
-
-def execsitecustomize():
- """Run custom site specific code, if available."""
- try:
- import sitecustomize
- except ImportError:
- pass
-
-def virtual_install_main_packages():
- f = open(os.path.join(os.path.dirname(__file__), 'orig-prefix.txt'))
- sys.real_prefix = f.read().strip()
- f.close()
- pos = 2
- if sys.path[0] == '':
- pos += 1
- if sys.platform == 'win32':
- paths = [os.path.join(sys.real_prefix, 'Lib'), os.path.join(sys.real_prefix, 'DLLs')]
- elif _is_jython:
- paths = [os.path.join(sys.real_prefix, 'Lib')]
- elif _is_pypy:
- cpyver = '%d.%d.%d' % sys.version_info[:3]
- paths = [os.path.join(sys.real_prefix, 'lib_pypy'),
- os.path.join(sys.real_prefix, 'lib-python', 'modified-%s' % cpyver),
- os.path.join(sys.real_prefix, 'lib-python', cpyver)]
- else:
- paths = [os.path.join(sys.real_prefix, 'lib', 'python'+sys.version[:3])]
- lib64_path = os.path.join(sys.real_prefix, 'lib64', 'python'+sys.version[:3])
- if os.path.exists(lib64_path):
- paths.append(lib64_path)
- # This is hardcoded in the Python executable, but relative to sys.prefix:
- plat_path = os.path.join(sys.real_prefix, 'lib', 'python'+sys.version[:3],
- 'plat-%s' % sys.platform)
- if os.path.exists(plat_path):
- paths.append(plat_path)
- # This is hardcoded in the Python executable, but
- # relative to sys.prefix, so we have to fix up:
- for path in list(paths):
- tk_dir = os.path.join(path, 'lib-tk')
- if os.path.exists(tk_dir):
- paths.append(tk_dir)
-
- # These are hardcoded in the Apple's Python executable,
- # but relative to sys.prefix, so we have to fix them up:
- if sys.platform == 'darwin':
- hardcoded_paths = [os.path.join(sys.real_prefix, 'lib', 'python'+sys.version[:3], module)
- for module in ('plat-darwin', 'plat-mac', 'plat-mac/lib-scriptpackages')]
-
- for path in hardcoded_paths:
- if os.path.exists(path):
- paths.append(path)
-
- sys.path.extend(paths)
-
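
On a Linux CPython 2.7 virtualenv, the function above reads the real interpreter's prefix from orig-prefix.txt and extends sys.path with the base library directories. A sketch of the resulting entries, assuming sys.real_prefix == '/usr' (illustrative only):

    # appended by virtual_install_main_packages on Linux (a sketch):
    #   /usr/lib/python2.7
    #   /usr/lib/python2.7/plat-linux2   (only if the directory exists)
    #   /usr/lib/python2.7/lib-tk        (only if the directory exists)
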
-def force_global_eggs_after_local_site_packages():
- """
- Force easy_installed eggs in the global environment to get placed
- in sys.path after all packages inside the virtualenv. This
- maintains the "least surprise" result that packages in the
- virtualenv always mask global packages, never the other way
- around.
-
- """
- egginsert = getattr(sys, '__egginsert', 0)
- for i, path in enumerate(sys.path):
- if i > egginsert and path.startswith(sys.prefix):
- egginsert = i
- sys.__egginsert = egginsert + 1
-
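
A worked example of the bookkeeping above, which remembers the last sys.path index still inside the virtualenv so that global eggs always land after it:

    # assume sys.prefix == '/venv' and
    # sys.path == ['', '/venv/lib/python2.7/site-packages',
    #              '/usr/lib/python2.7/site-packages']
    # the scan sets egginsert = 1 (last entry under '/venv'), so
    # sys.__egginsert = 2: global eggs get inserted behind the
    # virtualenv's own site-packages, never in front of it.
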
-def virtual_addsitepackages(known_paths):
- force_global_eggs_after_local_site_packages()
- return addsitepackages(known_paths, sys_prefix=sys.real_prefix)
-
-def fixclasspath():
- """Adjust the special classpath sys.path entries for Jython. These
- entries should follow the base virtualenv lib directories.
- """
- paths = []
- classpaths = []
- for path in sys.path:
- if path == '__classpath__' or path.startswith('__pyclasspath__'):
- classpaths.append(path)
- else:
- paths.append(path)
- sys.path = paths
- sys.path.extend(classpaths)
-
-def execusercustomize():
- """Run custom user specific code, if available."""
- try:
- import usercustomize
- except ImportError:
- pass
-
-
-def main():
- global ENABLE_USER_SITE
- virtual_install_main_packages()
- abs__file__()
- paths_in_sys = removeduppaths()
- if (os.name == "posix" and sys.path and
- os.path.basename(sys.path[-1]) == "Modules"):
- addbuilddir()
- if _is_jython:
- fixclasspath()
- GLOBAL_SITE_PACKAGES = not os.path.exists(os.path.join(os.path.dirname(__file__), 'no-global-site-packages.txt'))
- if not GLOBAL_SITE_PACKAGES:
- ENABLE_USER_SITE = False
- if ENABLE_USER_SITE is None:
- ENABLE_USER_SITE = check_enableusersite()
- paths_in_sys = addsitepackages(paths_in_sys)
- paths_in_sys = addusersitepackages(paths_in_sys)
- if GLOBAL_SITE_PACKAGES:
- paths_in_sys = virtual_addsitepackages(paths_in_sys)
- if sys.platform == 'os2emx':
- setBEGINLIBPATH()
- setquit()
- setcopyright()
- sethelper()
- aliasmbcs()
- setencoding()
- execsitecustomize()
- if ENABLE_USER_SITE:
- execusercustomize()
- # Remove sys.setdefaultencoding() so that users cannot change the
- # encoding after initialization. The test for presence is needed when
- # this module is run as a script, because this code is executed twice.
- if hasattr(sys, "setdefaultencoding"):
- del sys.setdefaultencoding
-
-main()
-
-def _script():
- help = """\
- %s [--user-base] [--user-site]
-
- Without arguments print some useful information
- With arguments print the value of USER_BASE and/or USER_SITE separated
- by '%s'.
-
- Exit codes with --user-base or --user-site:
- 0 - user site directory is enabled
- 1 - user site directory is disabled by user
- 2 - user site directory is disabled by super user
- or for security reasons
- >2 - unknown error
- """
- args = sys.argv[1:]
- if not args:
- print "sys.path = ["
- for dir in sys.path:
- print " %r," % (dir,)
- print "]"
- def exists(path):
- if os.path.isdir(path):
- return "exists"
- else:
- return "doesn't exist"
- print "USER_BASE: %r (%s)" % (USER_BASE, exists(USER_BASE))
- print "USER_SITE: %r (%s)" % (USER_SITE, exists(USER_SITE))
- print "ENABLE_USER_SITE: %r" % ENABLE_USER_SITE
- sys.exit(0)
-
- buffer = []
- if '--user-base' in args:
- buffer.append(USER_BASE)
- if '--user-site' in args:
- buffer.append(USER_SITE)
-
- if buffer:
- print os.pathsep.join(buffer)
- if ENABLE_USER_SITE:
- sys.exit(0)
- elif ENABLE_USER_SITE is False:
- sys.exit(1)
- elif ENABLE_USER_SITE is None:
- sys.exit(2)
- else:
- sys.exit(3)
- else:
- import textwrap
- print textwrap.dedent(help % (sys.argv[0], os.pathsep))
- sys.exit(10)
-
-if __name__ == '__main__':
- _script()
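
The module removed above also doubled as a small command-line tool; the stdlib site module in Python 2.6+ exposes the same values, so a rough equivalent of `python site.py --user-base --user-site` is (a sketch; the printed path is illustrative):

    import os, site
    print os.pathsep.join([site.USER_BASE, site.USER_SITE])
    # e.g. /home/user/.local:/home/user/.local/lib/python2.7/site-packages
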
diff --git a/lib/python2.7/sre.py b/lib/python2.7/sre.py
deleted file mode 120000
index 27f81b93..00000000
--- a/lib/python2.7/sre.py
+++ /dev/null
@@ -1 +0,0 @@
-/usr/lib/python2.7/sre.py \ No newline at end of file
diff --git a/lib/python2.7/sre_compile.py b/lib/python2.7/sre_compile.py
deleted file mode 120000
index dce5da4c..00000000
--- a/lib/python2.7/sre_compile.py
+++ /dev/null
@@ -1 +0,0 @@
-/usr/lib/python2.7/sre_compile.py \ No newline at end of file
diff --git a/lib/python2.7/sre_constants.py b/lib/python2.7/sre_constants.py
deleted file mode 120000
index b9c9797e..00000000
--- a/lib/python2.7/sre_constants.py
+++ /dev/null
@@ -1 +0,0 @@
-/usr/lib/python2.7/sre_constants.py \ No newline at end of file
diff --git a/lib/python2.7/sre_parse.py b/lib/python2.7/sre_parse.py
deleted file mode 120000
index f33a572f..00000000
--- a/lib/python2.7/sre_parse.py
+++ /dev/null
@@ -1 +0,0 @@
-/usr/lib/python2.7/sre_parse.py \ No newline at end of file
diff --git a/lib/python2.7/stat.py b/lib/python2.7/stat.py
deleted file mode 120000
index c1d654c1..00000000
--- a/lib/python2.7/stat.py
+++ /dev/null
@@ -1 +0,0 @@
-/usr/lib/python2.7/stat.py \ No newline at end of file
diff --git a/lib/python2.7/types.py b/lib/python2.7/types.py
deleted file mode 120000
index 55464783..00000000
--- a/lib/python2.7/types.py
+++ /dev/null
@@ -1 +0,0 @@
-/usr/lib/python2.7/types.py \ No newline at end of file
diff --git a/lib/python2.7/warnings.py b/lib/python2.7/warnings.py
deleted file mode 120000
index a9c47309..00000000
--- a/lib/python2.7/warnings.py
+++ /dev/null
@@ -1 +0,0 @@
-/usr/lib/python2.7/warnings.py \ No newline at end of file
diff --git a/local_settings.py.example b/local_settings.py.example
index beb48f84..b5883720 100644
--- a/local_settings.py.example
+++ b/local_settings.py.example
@@ -1,9 +1,9 @@
-### Django settings for archlinux project.
+### Django settings for Parabola GNU/Linux-libre project.
## Debug settings
-DEBUG = False
-TEMPLATE_DEBUG = True
-DEBUG_TOOLBAR = True
+DEBUG = True # As far as I can tell, this must be True for /media/* URLs to work
+TEMPLATE_DEBUG = True # More helpful this way
+DEBUG_TOOLBAR = False # Must install package django-debug-toolbar to use
## For django debug toolbar
INTERNAL_IPS = ('127.0.0.1',)
@@ -17,15 +17,37 @@ ADMINS = (
DATABASES = {
'default': {
'ENGINE' : 'django.db.backends.mysql',
- 'NAME' : 'archlinux',
- 'USER' : 'archlinux',
- 'PASSWORD': 'archlinux',
+ 'NAME' : 'parabola',
+ 'USER' : 'parabola',
+ 'PASSWORD': 'parabola',
'HOST' : '',
'PORT' : '',
- 'OPTIONS' : {'init_command': 'SET storage_engine=InnoDB'},
+ # InnoDB WILL NOT work
+ 'OPTIONS' : {'init_command': 'SET storage_engine=MyISAM'},
},
}
+## PostgreSQL Database settings
+#DATABASES = {
+# 'default': {
+# 'ENGINE' : 'django.db.backends.postgresql_psycopg2',
+# 'NAME' : 'parabola',
+# 'USER' : 'parabola',
+# 'PASSWORD': 'parabola',
+# 'HOST' : '',
+# 'PORT' : '',
+# 'OPTIONS' : {},
+# },
+#}
+
+## sqlite3 Database settings
+#DATABASES = {
+# 'default': {
+# 'ENGINE' : 'django.db.backends.sqlite3',
+# 'NAME' : '/srv/http/web/db.sqlite',
+# },
+#}
+
## Define cache settings
CACHES = {
'default': {
@@ -43,18 +65,17 @@ CACHE_MIDDLEWARE_SECONDS = 300
SESSION_COOKIE_SECURE = False
## location for saving dev pictures
-MEDIA_ROOT = '/srv/example.com/img/'
+MEDIA_ROOT = '/srv/http/media/devs/'
## web url for serving image files
-MEDIA_URL = 'http://example.com/img/'
+MEDIA_URL = '/media/'
## Make this unique, and don't share it with anybody.
SECRET_KEY = '00000000000000000000000000000000000000000000000'
## CDN settings
CDN_ENABLED = False
-CDN_PATH = 'http://example.com/path/'
-CDN_PATH_SECURE = 'https://example.com/path/'
-
+# Scheme-relative URL, should work for both http/https
+CDN_PATH = '//example.com/path/'
# vim: set ts=4 sw=4 et:
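
Because the example settings above force MyISAM through init_command, it is worth verifying which engine the Django connection actually ends up with; a quick check (a sketch; run inside `python manage.py shell` with these settings):

    from django.db import connection
    cursor = connection.cursor()
    cursor.execute("SHOW VARIABLES LIKE 'storage_engine'")
    print cursor.fetchone()   # expect ('storage_engine', 'MyISAM')
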
diff --git a/main/admin.py b/main/admin.py
index e86e5cab..e5da9fb9 100644
--- a/main/admin.py
+++ b/main/admin.py
@@ -4,9 +4,10 @@ from django.contrib.auth.admin import UserAdmin
from main.models import Arch, Donor, Package, Repo, Todolist, UserProfile
class DonorAdmin(admin.ModelAdmin):
- list_display = ('name', 'visible')
- list_filter = ('visible',)
+ list_display = ('name', 'visible', 'created')
+ list_filter = ('visible', 'created')
search_fields = ('name',)
+ exclude = ('created',)
class ArchAdmin(admin.ModelAdmin):
list_display = ('name', 'agnostic')
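
The new exclude keeps the auto-populated created column out of the donor admin form; the value is expected to be stamped on first save (main/models.py below now imports a set_created_field pre_save helper). A sketch of the intended flow (the donor name is made up):

    from main.models import Donor

    donor = Donor(name='Example donor')
    donor.save()         # 'created' filled in by the pre_save hook, not the form
    print donor.created
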
diff --git a/main/fields.py b/main/fields.py
new file mode 100644
index 00000000..948cb5d9
--- /dev/null
+++ b/main/fields.py
@@ -0,0 +1,42 @@
+from django.db import models
+from django.core.validators import RegexValidator
+
+
+class PositiveBigIntegerField(models.BigIntegerField):
+ _south_introspects = True
+
+ def get_internal_type(self):
+ return "BigIntegerField"
+
+ def formfield(self, **kwargs):
+ defaults = {'min_value': 0}
+ defaults.update(kwargs)
+ return super(PositiveBigIntegerField, self).formfield(**defaults)
+
+class PGPKeyField(models.CharField):
+ _south_introspects = True
+
+ def __init__(self, *args, **kwargs):
+ super(PGPKeyField, self).__init__(*args, **kwargs)
+ self.validators.append(RegexValidator(r'^[0-9A-F]{40}$',
+ "Ensure this value consists of 40 hex characters.", 'hex_char'))
+
+ def to_python(self, value):
+ if value == '' or value is None:
+ return None
+ value = super(PGPKeyField, self).to_python(value)
+ # remove all spaces
+ value = value.replace(' ', '')
+ # prune prefixes, either 0x or 2048R/ type
+ if value.startswith('0x'):
+ value = value[2:]
+ value = value.split('/')[-1]
+ # make all (hex letters) uppercase
+ return value.upper()
+
+ def formfield(self, **kwargs):
+ # override so we don't set max_length form field attribute
+ return models.Field.formfield(self, **kwargs)
+
+
+# vim: set ts=4 sw=4 et:
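
PGPKeyField normalizes user-entered key material before the 40-hex-digit validator runs; a sketch of to_python on representative inputs (the key values are made up):

    from main.fields import PGPKeyField

    f = PGPKeyField(max_length=40)
    f.to_python('0x9abc DEF0')      # -> '9ABCDEF0' ('0x' stripped, spaces removed)
    f.to_python('2048R/9abcdef0')   # -> '9ABCDEF0' (key-type prefix pruned)
    f.to_python('')                 # -> None
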
diff --git a/main/fixtures/arches.json b/main/fixtures/arches.json
index 6334c2d3..1e9dbc91 100644
--- a/main/fixtures/arches.json
+++ b/main/fixtures/arches.json
@@ -22,5 +22,13 @@
"agnostic": false,
"name": "x86_64"
}
+ },
+ {
+ "pk": 4,
+ "model": "main.arch",
+ "fields": {
+ "agnostic": false,
+ "name": "mips64el"
+ }
}
]
diff --git a/main/fixtures/groups.json b/main/fixtures/groups.json
index 8a6b2287..aa826b83 100644
--- a/main/fixtures/groups.json
+++ b/main/fixtures/groups.json
@@ -11,11 +11,6 @@
"package"
],
[
- "add_signoff",
- "main",
- "signoff"
- ],
- [
"add_todolist",
"main",
"todolist"
@@ -39,16 +34,6 @@
"delete_todolistpkg",
"main",
"todolistpkg"
- ],
- [
- "add_news",
- "news",
- "news"
- ],
- [
- "change_news",
- "news",
- "news"
]
]
}
@@ -342,11 +327,6 @@
"package"
],
[
- "add_signoff",
- "main",
- "signoff"
- ],
- [
"add_todolist",
"main",
"todolist"
@@ -403,4 +383,4 @@
]
}
}
-] \ No newline at end of file
+]
diff --git a/main/middleware.py b/main/middleware.py
index f893c795..f417b545 100644
--- a/main/middleware.py
+++ b/main/middleware.py
@@ -4,7 +4,7 @@
from django.conf import settings
from django.core.cache import cache
-from django.utils.cache import get_cache_key, learn_cache_key, patch_response_headers, get_max_age
+from django.utils.cache import learn_cache_key, patch_response_headers, get_max_age
class UpdateCacheMiddleware(object):
"""
diff --git a/main/migrations/0054_auto__add_field_donor_created.py b/main/migrations/0054_auto__add_field_donor_created.py
new file mode 100644
index 00000000..f4d5b157
--- /dev/null
+++ b/main/migrations/0054_auto__add_field_donor_created.py
@@ -0,0 +1,157 @@
+# encoding: utf-8
+import datetime
+from south.db import db
+from south.v2 import SchemaMigration
+from django.db import models
+
+class Migration(SchemaMigration):
+
+ def forwards(self, orm):
+ # Adding field 'Donor.created'
+ db.add_column('donors', 'created', self.gf('django.db.models.fields.DateTimeField')(default=datetime.date(2000, 1, 1)), keep_default=False)
+
+
+ def backwards(self, orm):
+ # Deleting field 'Donor.created'
+ db.delete_column('donors', 'created')
+
+
+ models = {
+ 'auth.group': {
+ 'Meta': {'object_name': 'Group'},
+ 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
+ 'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
+ },
+ 'auth.permission': {
+ 'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
+ 'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
+ 'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
+ 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
+ },
+ 'auth.user': {
+ 'Meta': {'object_name': 'User'},
+ 'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
+ 'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
+ 'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
+ 'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
+ 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
+ 'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
+ 'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
+ 'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
+ 'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
+ 'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
+ 'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
+ 'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
+ },
+ 'contenttypes.contenttype': {
+ 'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
+ 'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
+ 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
+ 'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
+ },
+ 'main.arch': {
+ 'Meta': {'ordering': "['name']", 'object_name': 'Arch', 'db_table': "'arches'"},
+ 'agnostic': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
+ 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'})
+ },
+ 'main.donor': {
+ 'Meta': {'ordering': "('name',)", 'object_name': 'Donor', 'db_table': "'donors'"},
+ 'created': ('django.db.models.fields.DateTimeField', [], {}),
+ 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
+ 'visible': ('django.db.models.fields.BooleanField', [], {'default': 'True'})
+ },
+ 'main.package': {
+ 'Meta': {'ordering': "('pkgname',)", 'object_name': 'Package', 'db_table': "'packages'"},
+ 'arch': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'packages'", 'to': "orm['main.Arch']"}),
+ 'build_date': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
+ 'compressed_size': ('main.models.PositiveBigIntegerField', [], {}),
+ 'epoch': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
+ 'filename': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
+ 'files_last_update': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
+ 'flag_date': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
+ 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'installed_size': ('main.models.PositiveBigIntegerField', [], {}),
+ 'last_update': ('django.db.models.fields.DateTimeField', [], {}),
+ 'packager': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True'}),
+ 'packager_str': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
+ 'pgp_signature': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
+ 'pkgbase': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
+ 'pkgdesc': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True'}),
+ 'pkgname': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
+ 'pkgrel': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
+ 'pkgver': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
+ 'repo': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'packages'", 'to': "orm['main.Repo']"}),
+ 'url': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True'})
+ },
+ 'main.packagedepend': {
+ 'Meta': {'object_name': 'PackageDepend', 'db_table': "'package_depends'"},
+ 'depname': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
+ 'depvcmp': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '255'}),
+ 'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
+ 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'optional': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
+ 'pkg': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['main.Package']"})
+ },
+ 'main.packagefile': {
+ 'Meta': {'object_name': 'PackageFile', 'db_table': "'package_files'"},
+ 'directory': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
+ 'filename': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
+ 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'is_directory': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
+ 'pkg': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['main.Package']"})
+ },
+ 'main.repo': {
+ 'Meta': {'ordering': "['name']", 'object_name': 'Repo', 'db_table': "'repos'"},
+ 'bugs_category': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
+ 'bugs_project': ('django.db.models.fields.SmallIntegerField', [], {'default': '1'}),
+ 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
+ 'staging': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
+ 'svn_root': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
+ 'testing': ('django.db.models.fields.BooleanField', [], {'default': 'False'})
+ },
+ 'main.todolist': {
+ 'Meta': {'object_name': 'Todolist', 'db_table': "'todolists'"},
+ 'creator': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"}),
+ 'date_added': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True'}),
+ 'description': ('django.db.models.fields.TextField', [], {}),
+ 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'name': ('django.db.models.fields.CharField', [], {'max_length': '255'})
+ },
+ 'main.todolistpkg': {
+ 'Meta': {'unique_together': "(('list', 'pkg'),)", 'object_name': 'TodolistPkg', 'db_table': "'todolist_pkgs'"},
+ 'complete': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
+ 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'list': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['main.Todolist']"}),
+ 'pkg': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['main.Package']"})
+ },
+ 'main.userprofile': {
+ 'Meta': {'object_name': 'UserProfile', 'db_table': "'user_profiles'"},
+ 'alias': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
+ 'allowed_repos': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['main.Repo']", 'symmetrical': 'False', 'blank': 'True'}),
+ 'favorite_distros': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
+ 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'interests': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
+ 'languages': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}),
+ 'location': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}),
+ 'notify': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
+ 'occupation': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}),
+ 'other_contact': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
+ 'pgp_key': ('main.models.PGPKeyField', [], {'max_length': '40', 'null': 'True', 'blank': 'True'}),
+ 'picture': ('django.db.models.fields.files.FileField', [], {'default': "'devs/silhouette.png'", 'max_length': '100'}),
+ 'public_email': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
+ 'roles': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
+ 'time_zone': ('django.db.models.fields.CharField', [], {'default': "'UTC'", 'max_length': '100'}),
+ 'user': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'userprofile'", 'unique': 'True', 'to': "orm['auth.User']"}),
+ 'website': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
+ 'yob': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'})
+ }
+ }
+
+ complete_apps = ['main']
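
The datetime.date(2000, 1, 1) default above is only used to backfill existing donor rows; keep_default=False drops it from the schema afterwards. New rows are presumably stamped by the pre_save helper that main/models.py now imports; set_created_field's body is not part of this diff, so the following is an assumption of what it might look like:

    from datetime import datetime

    def set_created_field(sender, instance, **kwargs):
        # hypothetical helper: stamp 'created' once, on the first save only
        if hasattr(instance, 'created') and not instance.created:
            instance.created = datetime.utcnow()
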
diff --git a/main/migrations/0055_unique_package_in_repo.py b/main/migrations/0055_unique_package_in_repo.py
new file mode 100644
index 00000000..63951a08
--- /dev/null
+++ b/main/migrations/0055_unique_package_in_repo.py
@@ -0,0 +1,155 @@
+# encoding: utf-8
+from south.db import db
+from south.v2 import SchemaMigration
+from django.db import models
+
+class Migration(SchemaMigration):
+
+ def forwards(self, orm):
+ db.delete_index('packages', ['pkgname'])
+ db.create_unique('packages', ['pkgname', 'repo_id', 'arch_id'])
+
+ def backwards(self, orm):
+ db.delete_unique('packages', ['pkgname', 'repo_id', 'arch_id'])
+ db.create_index('packages', ['pkgname'])
+
+
+ models = {
+ 'auth.group': {
+ 'Meta': {'object_name': 'Group'},
+ 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
+ 'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
+ },
+ 'auth.permission': {
+ 'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
+ 'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
+ 'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
+ 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
+ },
+ 'auth.user': {
+ 'Meta': {'object_name': 'User'},
+ 'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
+ 'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
+ 'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
+ 'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
+ 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
+ 'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
+ 'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
+ 'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
+ 'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
+ 'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
+ 'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
+ 'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
+ },
+ 'contenttypes.contenttype': {
+ 'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
+ 'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
+ 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
+ 'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
+ },
+ 'main.arch': {
+ 'Meta': {'ordering': "['name']", 'object_name': 'Arch', 'db_table': "'arches'"},
+ 'agnostic': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
+ 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'})
+ },
+ 'main.donor': {
+ 'Meta': {'ordering': "('name',)", 'object_name': 'Donor', 'db_table': "'donors'"},
+ 'created': ('django.db.models.fields.DateTimeField', [], {}),
+ 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
+ 'visible': ('django.db.models.fields.BooleanField', [], {'default': 'True'})
+ },
+ 'main.package': {
+ 'Meta': {'ordering': "('pkgname',)", 'unique_together': "(('pkgname', 'repo', 'arch'),)", 'object_name': 'Package', 'db_table': "'packages'"},
+ 'arch': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'packages'", 'to': "orm['main.Arch']"}),
+ 'build_date': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
+ 'compressed_size': ('main.models.PositiveBigIntegerField', [], {}),
+ 'epoch': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
+ 'filename': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
+ 'files_last_update': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
+ 'flag_date': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
+ 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'installed_size': ('main.models.PositiveBigIntegerField', [], {}),
+ 'last_update': ('django.db.models.fields.DateTimeField', [], {}),
+ 'packager': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True'}),
+ 'packager_str': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
+ 'pgp_signature': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
+ 'pkgbase': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
+ 'pkgdesc': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True'}),
+ 'pkgname': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
+ 'pkgrel': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
+ 'pkgver': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
+ 'repo': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'packages'", 'to': "orm['main.Repo']"}),
+ 'url': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True'})
+ },
+ 'main.packagedepend': {
+ 'Meta': {'object_name': 'PackageDepend', 'db_table': "'package_depends'"},
+ 'depname': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
+ 'depvcmp': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '255'}),
+ 'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
+ 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'optional': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
+ 'pkg': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['main.Package']"})
+ },
+ 'main.packagefile': {
+ 'Meta': {'object_name': 'PackageFile', 'db_table': "'package_files'"},
+ 'directory': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
+ 'filename': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
+ 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'is_directory': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
+ 'pkg': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['main.Package']"})
+ },
+ 'main.repo': {
+ 'Meta': {'ordering': "['name']", 'object_name': 'Repo', 'db_table': "'repos'"},
+ 'bugs_category': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
+ 'bugs_project': ('django.db.models.fields.SmallIntegerField', [], {'default': '1'}),
+ 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
+ 'staging': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
+ 'svn_root': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
+ 'testing': ('django.db.models.fields.BooleanField', [], {'default': 'False'})
+ },
+ 'main.todolist': {
+ 'Meta': {'object_name': 'Todolist', 'db_table': "'todolists'"},
+ 'creator': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"}),
+ 'date_added': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True'}),
+ 'description': ('django.db.models.fields.TextField', [], {}),
+ 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'name': ('django.db.models.fields.CharField', [], {'max_length': '255'})
+ },
+ 'main.todolistpkg': {
+ 'Meta': {'unique_together': "(('list', 'pkg'),)", 'object_name': 'TodolistPkg', 'db_table': "'todolist_pkgs'"},
+ 'complete': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
+ 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'list': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['main.Todolist']"}),
+ 'pkg': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['main.Package']"})
+ },
+ 'main.userprofile': {
+ 'Meta': {'object_name': 'UserProfile', 'db_table': "'user_profiles'"},
+ 'alias': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
+ 'allowed_repos': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['main.Repo']", 'symmetrical': 'False', 'blank': 'True'}),
+ 'favorite_distros': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
+ 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'interests': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
+ 'languages': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}),
+ 'location': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}),
+ 'notify': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
+ 'occupation': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}),
+ 'other_contact': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
+ 'pgp_key': ('main.models.PGPKeyField', [], {'max_length': '40', 'null': 'True', 'blank': 'True'}),
+ 'picture': ('django.db.models.fields.files.FileField', [], {'default': "'devs/silhouette.png'", 'max_length': '100'}),
+ 'public_email': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
+ 'roles': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
+ 'time_zone': ('django.db.models.fields.CharField', [], {'default': "'UTC'", 'max_length': '100'}),
+ 'user': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'userprofile'", 'unique': 'True', 'to': "orm['auth.User']"}),
+ 'website': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
+ 'yob': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'})
+ }
+ }
+
+ complete_apps = ['main']
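
With the unique index on (pkgname, repo_id, arch_id) in place, a second row with the same triple is rejected at the database level; a sketch to try in a Django shell after migrating (assumes at least one package row exists):

    from copy import copy
    from django.db import IntegrityError
    from main.models import Package

    dup = copy(Package.objects.all()[0])
    dup.pk = None    # force an INSERT that reuses the same (pkgname, repo, arch)
    try:
        dup.save()
    except IntegrityError:
        print "duplicate (pkgname, repo, arch) rejected"
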
diff --git a/main/migrations/0056_auto__chg_field_package_pkgdesc.py b/main/migrations/0056_auto__chg_field_package_pkgdesc.py
new file mode 100644
index 00000000..21dd43af
--- /dev/null
+++ b/main/migrations/0056_auto__chg_field_package_pkgdesc.py
@@ -0,0 +1,153 @@
+# encoding: utf-8
+from south.db import db
+from south.v2 import SchemaMigration
+from django.db import models
+
+class Migration(SchemaMigration):
+
+ def forwards(self, orm):
+ db.alter_column('packages', 'pkgdesc', self.gf('django.db.models.fields.TextField')(null=True))
+
+ def backwards(self, orm):
+ db.alter_column('packages', 'pkgdesc', self.gf('django.db.models.fields.CharField')(max_length=255, null=True))
+
+
+ models = {
+ 'auth.group': {
+ 'Meta': {'object_name': 'Group'},
+ 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
+ 'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
+ },
+ 'auth.permission': {
+ 'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
+ 'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
+ 'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
+ 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
+ },
+ 'auth.user': {
+ 'Meta': {'object_name': 'User'},
+ 'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
+ 'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
+ 'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
+ 'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
+ 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
+ 'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
+ 'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
+ 'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
+ 'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
+ 'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
+ 'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
+ 'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
+ },
+ 'contenttypes.contenttype': {
+ 'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
+ 'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
+ 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
+ 'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
+ },
+ 'main.arch': {
+ 'Meta': {'ordering': "['name']", 'object_name': 'Arch', 'db_table': "'arches'"},
+ 'agnostic': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
+ 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'})
+ },
+ 'main.donor': {
+ 'Meta': {'ordering': "('name',)", 'object_name': 'Donor', 'db_table': "'donors'"},
+ 'created': ('django.db.models.fields.DateTimeField', [], {}),
+ 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
+ 'visible': ('django.db.models.fields.BooleanField', [], {'default': 'True'})
+ },
+ 'main.package': {
+ 'Meta': {'ordering': "('pkgname',)", 'unique_together': "(('pkgname', 'repo', 'arch'),)", 'object_name': 'Package', 'db_table': "'packages'"},
+ 'arch': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'packages'", 'to': "orm['main.Arch']"}),
+ 'build_date': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
+ 'compressed_size': ('main.models.PositiveBigIntegerField', [], {}),
+ 'epoch': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
+ 'filename': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
+ 'files_last_update': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
+ 'flag_date': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
+ 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'installed_size': ('main.models.PositiveBigIntegerField', [], {}),
+ 'last_update': ('django.db.models.fields.DateTimeField', [], {}),
+ 'packager': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True'}),
+ 'packager_str': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
+ 'pgp_signature': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
+ 'pkgbase': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
+ 'pkgdesc': ('django.db.models.fields.TextField', [], {'null': 'True'}),
+ 'pkgname': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
+ 'pkgrel': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
+ 'pkgver': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
+ 'repo': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'packages'", 'to': "orm['main.Repo']"}),
+ 'url': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True'})
+ },
+ 'main.packagedepend': {
+ 'Meta': {'object_name': 'PackageDepend', 'db_table': "'package_depends'"},
+ 'depname': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
+ 'depvcmp': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '255'}),
+ 'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
+ 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'optional': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
+ 'pkg': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['main.Package']"})
+ },
+ 'main.packagefile': {
+ 'Meta': {'object_name': 'PackageFile', 'db_table': "'package_files'"},
+ 'directory': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
+ 'filename': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
+ 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'is_directory': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
+ 'pkg': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['main.Package']"})
+ },
+ 'main.repo': {
+ 'Meta': {'ordering': "['name']", 'object_name': 'Repo', 'db_table': "'repos'"},
+ 'bugs_category': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
+ 'bugs_project': ('django.db.models.fields.SmallIntegerField', [], {'default': '1'}),
+ 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
+ 'staging': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
+ 'svn_root': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
+ 'testing': ('django.db.models.fields.BooleanField', [], {'default': 'False'})
+ },
+ 'main.todolist': {
+ 'Meta': {'object_name': 'Todolist', 'db_table': "'todolists'"},
+ 'creator': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"}),
+ 'date_added': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True'}),
+ 'description': ('django.db.models.fields.TextField', [], {}),
+ 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'name': ('django.db.models.fields.CharField', [], {'max_length': '255'})
+ },
+ 'main.todolistpkg': {
+ 'Meta': {'unique_together': "(('list', 'pkg'),)", 'object_name': 'TodolistPkg', 'db_table': "'todolist_pkgs'"},
+ 'complete': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
+ 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'list': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['main.Todolist']"}),
+ 'pkg': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['main.Package']"})
+ },
+ 'main.userprofile': {
+ 'Meta': {'object_name': 'UserProfile', 'db_table': "'user_profiles'"},
+ 'alias': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
+ 'allowed_repos': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['main.Repo']", 'symmetrical': 'False', 'blank': 'True'}),
+ 'favorite_distros': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
+ 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'interests': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
+ 'languages': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}),
+ 'location': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}),
+ 'notify': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
+ 'occupation': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}),
+ 'other_contact': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
+ 'pgp_key': ('main.models.PGPKeyField', [], {'max_length': '40', 'null': 'True', 'blank': 'True'}),
+ 'picture': ('django.db.models.fields.files.FileField', [], {'default': "'devs/silhouette.png'", 'max_length': '100'}),
+ 'public_email': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
+ 'roles': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
+ 'time_zone': ('django.db.models.fields.CharField', [], {'default': "'UTC'", 'max_length': '100'}),
+ 'user': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'userprofile'", 'unique': 'True', 'to': "orm['auth.User']"}),
+ 'website': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
+ 'yob': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'})
+ }
+ }
+
+ complete_apps = ['main']
diff --git a/main/migrations/0057_auto__add_field_userprofile_latin_name.py b/main/migrations/0057_auto__add_field_userprofile_latin_name.py
new file mode 100644
index 00000000..ffde1885
--- /dev/null
+++ b/main/migrations/0057_auto__add_field_userprofile_latin_name.py
@@ -0,0 +1,153 @@
+# encoding: utf-8
+from south.db import db
+from south.v2 import SchemaMigration
+from django.db import models
+
+class Migration(SchemaMigration):
+
+ def forwards(self, orm):
+ db.add_column('user_profiles', 'latin_name', self.gf('django.db.models.fields.CharField')(max_length=255, null=True, blank=True), keep_default=False)
+
+ def backwards(self, orm):
+ db.delete_column('user_profiles', 'latin_name')
+
+ models = {
+ 'auth.group': {
+ 'Meta': {'object_name': 'Group'},
+ 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
+ 'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
+ },
+ 'auth.permission': {
+ 'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
+ 'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
+ 'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
+ 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
+ },
+ 'auth.user': {
+ 'Meta': {'object_name': 'User'},
+ 'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
+ 'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
+ 'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
+ 'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
+ 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
+ 'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
+ 'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
+ 'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
+ 'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
+ 'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
+ 'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
+ 'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
+ },
+ 'contenttypes.contenttype': {
+ 'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
+ 'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
+ 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
+ 'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
+ },
+ 'main.arch': {
+ 'Meta': {'ordering': "['name']", 'object_name': 'Arch', 'db_table': "'arches'"},
+ 'agnostic': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
+ 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'})
+ },
+ 'main.donor': {
+ 'Meta': {'ordering': "('name',)", 'object_name': 'Donor', 'db_table': "'donors'"},
+ 'created': ('django.db.models.fields.DateTimeField', [], {}),
+ 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
+ 'visible': ('django.db.models.fields.BooleanField', [], {'default': 'True'})
+ },
+ 'main.package': {
+ 'Meta': {'ordering': "('pkgname',)", 'unique_together': "(('pkgname', 'repo', 'arch'),)", 'object_name': 'Package', 'db_table': "'packages'"},
+ 'arch': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'packages'", 'to': "orm['main.Arch']"}),
+ 'build_date': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
+ 'compressed_size': ('main.models.PositiveBigIntegerField', [], {}),
+ 'epoch': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
+ 'filename': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
+ 'files_last_update': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
+ 'flag_date': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
+ 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'installed_size': ('main.models.PositiveBigIntegerField', [], {}),
+ 'last_update': ('django.db.models.fields.DateTimeField', [], {}),
+ 'packager': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True'}),
+ 'packager_str': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
+ 'pgp_signature': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
+ 'pkgbase': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
+ 'pkgdesc': ('django.db.models.fields.TextField', [], {'null': 'True'}),
+ 'pkgname': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
+ 'pkgrel': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
+ 'pkgver': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
+ 'repo': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'packages'", 'to': "orm['main.Repo']"}),
+ 'url': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True'})
+ },
+ 'main.packagedepend': {
+ 'Meta': {'object_name': 'PackageDepend', 'db_table': "'package_depends'"},
+ 'depname': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
+ 'depvcmp': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '255'}),
+ 'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
+ 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'optional': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
+ 'pkg': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['main.Package']"})
+ },
+ 'main.packagefile': {
+ 'Meta': {'object_name': 'PackageFile', 'db_table': "'package_files'"},
+ 'directory': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
+ 'filename': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
+ 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'is_directory': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
+ 'pkg': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['main.Package']"})
+ },
+ 'main.repo': {
+ 'Meta': {'ordering': "['name']", 'object_name': 'Repo', 'db_table': "'repos'"},
+ 'bugs_category': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
+ 'bugs_project': ('django.db.models.fields.SmallIntegerField', [], {'default': '1'}),
+ 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
+ 'staging': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
+ 'svn_root': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
+ 'testing': ('django.db.models.fields.BooleanField', [], {'default': 'False'})
+ },
+ 'main.todolist': {
+ 'Meta': {'object_name': 'Todolist', 'db_table': "'todolists'"},
+ 'creator': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"}),
+ 'date_added': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True'}),
+ 'description': ('django.db.models.fields.TextField', [], {}),
+ 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'name': ('django.db.models.fields.CharField', [], {'max_length': '255'})
+ },
+ 'main.todolistpkg': {
+ 'Meta': {'unique_together': "(('list', 'pkg'),)", 'object_name': 'TodolistPkg', 'db_table': "'todolist_pkgs'"},
+ 'complete': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
+ 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'list': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['main.Todolist']"}),
+ 'pkg': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['main.Package']"})
+ },
+ 'main.userprofile': {
+ 'Meta': {'object_name': 'UserProfile', 'db_table': "'user_profiles'"},
+ 'alias': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
+ 'allowed_repos': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['main.Repo']", 'symmetrical': 'False', 'blank': 'True'}),
+ 'favorite_distros': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
+ 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'interests': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
+ 'languages': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}),
+ 'latin_name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
+ 'location': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}),
+ 'notify': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
+ 'occupation': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}),
+ 'other_contact': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
+ 'pgp_key': ('main.models.PGPKeyField', [], {'max_length': '40', 'null': 'True', 'blank': 'True'}),
+ 'picture': ('django.db.models.fields.files.FileField', [], {'default': "'devs/silhouette.png'", 'max_length': '100'}),
+ 'public_email': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
+ 'roles': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
+ 'time_zone': ('django.db.models.fields.CharField', [], {'default': "'UTC'", 'max_length': '100'}),
+ 'user': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'userprofile'", 'unique': 'True', 'to': "orm['auth.User']"}),
+ 'website': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
+ 'yob': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'})
+ }
+ }
+
+ complete_apps = ['main']
diff --git a/main/models.py b/main/models.py
index b5cd8638..9156fb51 100644
--- a/main/models.py
+++ b/main/models.py
@@ -1,52 +1,17 @@
from django.db import models
-from django.core.validators import RegexValidator
+from django.db.models.signals import pre_save
from django.contrib.auth.models import User
from django.contrib.sites.models import Site
from django.forms import ValidationError
-from main.utils import cache_function, make_choice
+from .fields import PositiveBigIntegerField, PGPKeyField
+from .utils import cache_function, make_choice, set_created_field
from packages.models import PackageRelation
-from packages.models import Signoff as PackageSignoff
from datetime import datetime
from itertools import groupby
import pytz
-class PositiveBigIntegerField(models.BigIntegerField):
- _south_introspects = True
-
- def get_internal_type(self):
- return "BigIntegerField"
-
- def formfield(self, **kwargs):
- defaults = {'min_value': 0}
- defaults.update(kwargs)
- return super(PositiveBigIntegerField, self).formfield(**defaults)
-
-class PGPKeyField(models.CharField):
- _south_introspects = True
-
- def to_python(self, value):
- if value == '':
- return None
- value = super(PGPKeyField, self).to_python(value)
- # remove all spaces
- value = value.replace(' ', '')
- # prune prefixes, either 0x or 2048R/ type
- if value.startswith('0x'):
- value = value[2:]
- value = value.split('/')[-1]
- return value
-
- def formfield(self, **kwargs):
- # override so we don't set max_length form field attribute
- return models.Field.formfield(self, **kwargs)
-
-def validate_pgp_key_length(value):
- if len(value) not in (8, 16, 40):
- raise ValidationError(
- u'Ensure this value has 8, 16, or 40 characters (it has %d).' % len(value),
- 'pgp_key_value')
class UserProfile(models.Model):
notify = models.BooleanField(
@@ -66,10 +31,8 @@ class UserProfile(models.Model):
help_text="Required field")
other_contact = models.CharField(max_length=100, null=True, blank=True)
pgp_key = PGPKeyField(max_length=40, null=True, blank=True,
- verbose_name="PGP key", validators=[RegexValidator(r'^[0-9A-F]+$',
- "Ensure this value consists of only hex characters.", 'hex_char'),
- validate_pgp_key_length],
- help_text="PGP Key ID or fingerprint (8, 16, or 40 hex digits)")
+ verbose_name="PGP key fingerprint",
+ help_text="consists of 40 hex digits; use `gpg --fingerprint`")
website = models.CharField(max_length=200, null=True, blank=True)
yob = models.IntegerField("Year of birth", null=True, blank=True)
location = models.CharField(max_length=50, null=True, blank=True)
@@ -82,12 +45,26 @@ class UserProfile(models.Model):
help_text="Ideally 125px by 125px")
user = models.OneToOneField(User, related_name='userprofile')
allowed_repos = models.ManyToManyField('Repo', blank=True)
+ latin_name = models.CharField(max_length=255, null=True, blank=True,
+ help_text="Latin-form name; used only for non-Latin full names")
class Meta:
db_table = 'user_profiles'
verbose_name = 'Additional Profile Data'
verbose_name_plural = 'Additional Profile Data'
+ def get_absolute_url(self):
+ # TODO: this is disgusting. find a way to consolidate this logic with
+ # public.views.userlist among other places, and make some constants or
+ # something so we aren't using copies of string names everywhere.
+ group_names = self.user.groups.values_list('name', flat=True)
+ if "Developers" in group_names:
+ prefix = "developers"
+ elif "Trusted Users" in group_names:
+ prefix = "trustedusers"
+ else:
+ prefix = "fellows"
+ return '/%s/#%s' % (prefix, self.user.username)
class TodolistManager(models.Manager):
def incomplete(self):
@@ -98,6 +75,10 @@ class PackageManager(models.Manager):
"""Used by dev dashboard."""
return self.filter(flag_date__isnull=False)
+ def signed(self):
+ """Used by dev dashboard."""
+ return self.filter(pgp_signature__isnull=False)
+
def normal(self):
return self.select_related('arch', 'repo')
@@ -105,13 +86,15 @@ class Donor(models.Model):
name = models.CharField(max_length=255, unique=True)
visible = models.BooleanField(default=True,
help_text="Should we show this donor on the public page?")
+ created = models.DateTimeField()
def __unicode__(self):
return self.name
class Meta:
db_table = 'donors'
- ordering = ['name']
+ ordering = ('name',)
+        get_latest_by = 'created'
class Arch(models.Model):
name = models.CharField(max_length=255, unique=True)
@@ -158,12 +141,12 @@ class Package(models.Model):
on_delete=models.PROTECT)
arch = models.ForeignKey(Arch, related_name="packages",
on_delete=models.PROTECT)
- pkgname = models.CharField(max_length=255, db_index=True)
+ pkgname = models.CharField(max_length=255)
pkgbase = models.CharField(max_length=255, db_index=True)
pkgver = models.CharField(max_length=255)
pkgrel = models.CharField(max_length=255)
epoch = models.PositiveIntegerField(default=0)
- pkgdesc = models.CharField(max_length=255, null=True)
+ pkgdesc = models.TextField(null=True)
url = models.CharField(max_length=255, null=True)
filename = models.CharField(max_length=255)
compressed_size = PositiveBigIntegerField()
@@ -178,10 +161,12 @@ class Package(models.Model):
flag_date = models.DateTimeField(null=True)
objects = PackageManager()
+
class Meta:
db_table = 'packages'
ordering = ('pkgname',)
get_latest_by = 'last_update'
+ unique_together = (('pkgname', 'repo', 'arch'),)
def __unicode__(self):
return self.pkgname
@@ -204,21 +189,19 @@ class Package(models.Model):
def is_signed(self):
return bool(self.pgp_signature)
- @property
- def maintainers(self):
- return User.objects.filter(
- package_relations__pkgbase=self.pkgbase,
- package_relations__type=PackageRelation.MAINTAINER)
+ _maintainers = None
@property
- def signoffs(self):
- return PackageSignoff.objects.select_related('user').filter(
- pkgbase=self.pkgbase, pkgver=self.pkgver, pkgrel=self.pkgrel,
- epoch=self.epoch, arch=self.arch, repo=self.repo)
+ def maintainers(self):
+ if self._maintainers is None:
+ self._maintainers = User.objects.filter(
+ package_relations__pkgbase=self.pkgbase,
+ package_relations__type=PackageRelation.MAINTAINER)
+ return self._maintainers
- def approved_for_signoff(self):
- count = self.signoffs.filter(revoked__isnull=True).count()
- return count >= PackageSignoff.REQUIRED
+ @maintainers.setter
+ def maintainers(self, maintainers):
+ self._maintainers = maintainers
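+
+    # The setter exists so list views can compute maintainer sets in bulk and
+    # inject them here, avoiding one query per package via the property above
+    # (our reading of the pattern; the commit itself does not say).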
@cache_function(300)
def applicable_arches(self):
@@ -460,12 +443,17 @@ class Todolist(models.Model):
def __unicode__(self):
return self.name
+ _packages = None
+
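+    # Cached on first access; note that the "if not self._packages" test below
+    # treats an empty queryset as a cache miss, so an empty todolist re-queries.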
@property
def packages(self):
- # select_related() does not use LEFT OUTER JOIN for nullable ForeignKey
- # fields. That is why we need to explicitly list the ones we want.
- return TodolistPkg.objects.select_related(
- 'pkg__repo', 'pkg__arch').filter(list=self).order_by('pkg')
+ if not self._packages:
+ # select_related() does not use LEFT OUTER JOIN for nullable
+ # ForeignKey fields. That is why we need to explicitly list the
+ # ones we want.
+ self._packages = TodolistPkg.objects.select_related(
+ 'pkg__repo', 'pkg__arch').filter(list=self).order_by('pkg')
+ return self._packages
@property
def package_names(self):
@@ -478,10 +466,16 @@ class Todolist(models.Model):
def get_absolute_url(self):
return '/todo/%i/' % self.id
+ def get_full_url(self, proto='https'):
+        '''get a full URL, including protocol and domain, suitable for use in emails'''
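+        # e.g. 'https://www.parabolagnulinux.org/todo/12/' (domain and id are
+        # hypothetical); the domain comes from the sites framework below.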
+ domain = Site.objects.get_current().domain
+ return '%s://%s%s' % (proto, domain, self.get_absolute_url())
+
class TodolistPkg(models.Model):
list = models.ForeignKey(Todolist)
pkg = models.ForeignKey(Package)
complete = models.BooleanField(default=False)
+
class Meta:
db_table = 'todolist_pkgs'
unique_together = (('list','pkg'),)
@@ -499,5 +493,7 @@ post_save.connect(refresh_latest, sender=Package,
dispatch_uid="main.models")
pre_save.connect(set_todolist_fields, sender=Todolist,
dispatch_uid="main.models")
+pre_save.connect(set_created_field, sender=Donor,
+ dispatch_uid="main.models")
# vim: set ts=4 sw=4 et:
diff --git a/main/templatetags/cdn.py b/main/templatetags/cdn.py
index c25040c0..5cb12fcf 100644
--- a/main/templatetags/cdn.py
+++ b/main/templatetags/cdn.py
@@ -23,12 +23,7 @@ class CDNPrefixNode(template.Node):
oncdn = getattr(settings, 'CDN_ENABLED', True)
if not oncdn:
return ''
- secure = 'secure' in context and context['secure']
# if left undefined, same behavior as if CDN is turned off
- paths = {
- False: getattr(settings, 'CDN_PATH', ''),
- True: getattr(settings, 'CDN_PATH_SECURE', ''),
- }
- return paths[secure]
+ return getattr(settings, 'CDN_PATH', '')
# vim: set ts=4 sw=4 et:
diff --git a/main/templatetags/pgp.py b/main/templatetags/pgp.py
index 956de892..d69e2918 100644
--- a/main/templatetags/pgp.py
+++ b/main/templatetags/pgp.py
@@ -1,10 +1,11 @@
from django import template
from django.conf import settings
+from django.utils.html import conditional_escape
+from django.utils.safestring import mark_safe
register = template.Library()
def format_key(key_id):
- print len(key_id)
if len(key_id) in (8, 20):
return u'0x%s' % key_id
elif len(key_id) == 40:
@@ -22,9 +23,20 @@ def pgp_key_link(key_id):
pgp_server = getattr(settings, 'PGP_SERVER', None)
if not pgp_server:
return format_key(key_id)
- url = 'http://%s/pks/lookup?op=vindex&fingerprint=on&exact=on&search=0x%s' % \
+ url = 'http://%s/pks/lookup?op=vindex&amp;fingerprint=on&amp;exact=on&amp;search=0x%s' % \
(pgp_server, key_id)
- values = (url, key_id, format_key(key_id))
- return '<a href="%s" title="PGP key search for 0x%s">%s</a>' % values
+ values = (url, format_key(key_id), key_id[-8:])
+ return '<a href="%s" title="PGP key search for %s">0x%s</a>' % values
+
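+# Autoescape-aware filter, per the usual Django recipe: escape the incoming
+# value first, then mark_safe the formatted result so it is not escaped again.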
+@register.filter
+def pgp_fingerprint(key_id, autoescape=True):
+ if not key_id:
+ return u''
+ if autoescape:
+ esc = conditional_escape
+ else:
+ esc = lambda x: x
+ return mark_safe(format_key(esc(key_id)))
+pgp_fingerprint.needs_autoescape = True
# vim: set ts=4 sw=4 et:
diff --git a/media/CP_EN_BK_S_001.gif b/media/CP_EN_BK_S_001.gif
new file mode 100644
index 00000000..41cf0885
--- /dev/null
+++ b/media/CP_EN_BK_S_001.gif
Binary files differ
diff --git a/media/admin_media b/media/admin_media
index 585cf837..3d6781eb 120000
--- a/media/admin_media
+++ b/media/admin_media
@@ -1 +1 @@
-/usr/lib/python2.7/site-packages/django/contrib/admin/media \ No newline at end of file
+../../web-env/lib/python2.7/site-packages/django/contrib/admin/media/ \ No newline at end of file
diff --git a/media/archnavbar/archnavbar.css b/media/archnavbar/archnavbar.css
index 6b82cb92..f83b8544 100644
--- a/media/archnavbar/archnavbar.css
+++ b/media/archnavbar/archnavbar.css
@@ -7,11 +7,9 @@
*/
/* container for the entire bar */
-#archnavbar { height: 40px !important; padding: 10px 15px !important;
-background: #000 !important; border-bottom: 5px #787DAB solid !important; }
+#archnavbar { height: 40px !important; padding: 10px 15px !important; background: #000 !important; border-bottom: 5px #787DAB solid !important; }
-#archnavbarlogo { float: left !important; margin: 0 !important; padding: 0
-!important; height: 50px !important; width: 397px !important; }
+#archnavbarlogo { float: left !important; margin: 0 !important; padding: 0 !important; height: 50px !important; width: 397px !important; }
/* and use a proper PNG for all other modern browsers */
html > body #archnavbarlogo { background: url('parabolabw.png') no-repeat !important; }
@@ -20,8 +18,7 @@ html > body #archnavbarlogo { background: url('parabolabw.png') no-repeat !impor
#archnavbarlogo h1 { margin: 0 !important; padding: 0 !important; text-indent: -9999px !important; }
/* make the link the same size as the logo */
-#archnavbarlogo a { display: block !important; height: 50px !important; width:
-397px !important; }
+#archnavbarlogo a { display: block !important; height: 50px !important; width: 397px !important; }
/* display the list inline, float it to the right and style it */
#archnavbar ul { display: inline !important; float: right !important; list-style: none !important; margin: 0 !important; padding: 0 !important; }
diff --git a/media/archweb.css b/media/archweb.css
index 6a2f96cc..cb962648 100644
--- a/media/archweb.css
+++ b/media/archweb.css
@@ -53,7 +53,7 @@ pre {
border: 1px solid #bdb;
background: #dfd;
padding: 0.5em;
- margin: 1em;
+ margin: 0.25em 2em;
}
pre code {
@@ -70,15 +70,18 @@ input {
}
select[multiple] {
- padding: 1px 0;
+ padding-top: 1px;
+ padding-bottom: 1px;
}
select[multiple] option {
- padding: 0 0.5em 0 0.3em;
+ padding-left: 0.3em;
+ padding-right: 0.5em;
}
input[type=submit] {
- padding: 0 0.6em;
+ padding-left: 0.6em;
+ padding-right: 0.6em;
}
.clear {
@@ -131,7 +134,7 @@ h2 {
h3 {
font-size: 1.25em;
- margin-top: .5em;
+ margin-top: 1em;
}
h4 {
@@ -263,25 +266,6 @@ table.pretty2 {
border: 1px dotted #bbb;
}
-/* definition lists */
-dl {
- clear: both;
-}
-
- dl dt,
- dl dd {
- margin-bottom: 4px;
- padding: 8px 0px 4px;
- font-weight: bold;
- border-top: 1px solid #888;
- }
-
- dl dt {
- color: #333;
- float:left;
- padding-right:15px;
- }
-
/* forms and input styling */
form p {
margin: 0.5em 0;
@@ -374,8 +358,7 @@ ul.errorlist {
}
#news h3 {
- float: left;
- padding-bottom: .5em
+ border-bottom: 1px solid #888;
}
#news div {
@@ -392,11 +375,10 @@ ul.errorlist {
#news .rss-icon {
float: right;
- margin-top: 1em;
+ margin: -1.6em 0.4em 0 0;
}
#news h4 {
- clear: both;
font-size: 1em;
margin-top: 1.5em;
border-bottom: 1px dotted #bbb;
@@ -408,31 +390,6 @@ ul.errorlist {
margin: -1.8em 0.5em 0 0;
}
-/* home: arrowed headings */
-#news h3 a {
- display: block;
- background: #787DAB;
- font-size: 15px;
- padding: 2px 10px;
- color: white;
-}
-
- #news a:active {
- color: white;
- }
-
-h3 span.arrow {
- display: block;
- width: 0px;
- height: 0px;
- border-left: 6px solid transparent;
- border-right: 6px solid transparent;
- border-top: 6px solid #787DAB;
- margin: 0 auto;
- font-size: 0px;
- line-height: 0px;
-}
-
/* home: pkgsearch box */
#pkgsearch {
padding: 1em 0.75em;
@@ -536,7 +493,8 @@ div.widget {
.news-nav .prev,
.news-nav .next {
- margin: 0 1em;
+ margin-left: 1em;
+ margin-right: 1em;
}
/* news: article pages */
@@ -608,6 +566,18 @@ table.results {
background-color:#fff;
}
+ table.results td {
+ padding: .3em 1em .3em 3px;
+ }
+
+ table.results tr.odd {
+ background: #fff;
+ }
+
+ table.results tr.even {
+ background: #e4eeff;
+ }
+
/* additional styles for JS sorting */
table.results th.header {
padding-right: 20px;
@@ -627,18 +597,6 @@ table.results {
background-image: url(asc.gif);
}
- table.results td {
- padding: .3em 1em .3em 3px;
- }
-
- table.results tr.odd {
- background: #fff;
- }
-
- table.results tr.even {
- background: #e4eeff;
- }
-
table.results .flagged {
color: red;
}
@@ -739,10 +697,6 @@ form#flag-pkg-form input[type=text] {
}
/* pkgdetails: deps, required by and file lists */
-#pkgdetails #metadata {
- clear: both;
-}
-
#pkgdetails #metadata h3 {
background: #555;
color: #fff;
@@ -830,6 +784,7 @@ table.arch-bio-entry table.bio {
}
/* dev: login/out */
+p.login-error {}
table#dev-login {
width: auto;
}
@@ -931,15 +886,8 @@ ul.admin-actions {
}
#releng-feedback ul+.helptext {
- position: relative; top: -0.9em;
-}
-
-#releng-result .success-yes {
- color: green;
-}
-
-#releng-result .success-no {
- color: red;
+ position: relative;
+ top: -0.9em;
}
/* highlight current website in the navbar */
diff --git a/media/archweb.js b/media/archweb.js
index 49f2a319..151d0f81 100644
--- a/media/archweb.js
+++ b/media/archweb.js
@@ -6,7 +6,7 @@ if (typeof $.tablesorter !== 'undefined') {
is: function(s) { return false; },
format: function(s) {
var m = s.match(/\d+/);
- return m ? parseInt(m[0]) : 0;
+ return m ? parseInt(m[0], 10) : 0;
},
type: 'numeric'
});
@@ -27,7 +27,9 @@ if (typeof $.tablesorter !== 'undefined') {
return ($.inArray(s, this.special) > -1) || $.tablesorter.isDigit(s, c);
},
format: function(s) {
- if ($.inArray(s, this.special) > -1) return Number.MAX_VALUE;
+ if ($.inArray(s, this.special) > -1) {
+ return Number.MAX_VALUE;
+ }
return $.tablesorter.formatFloat(s);
},
type: 'numeric'
@@ -41,14 +43,31 @@ if (typeof $.tablesorter !== 'undefined') {
return ($.inArray(s, this.special) > -1) || this.re.test(s);
},
format: function(s) {
- if ($.inArray(s, this.special) > -1) return Number.MAX_VALUE;
+ if ($.inArray(s, this.special) > -1) {
+ return Number.MAX_VALUE;
+ }
var matches = this.re.exec(s);
- if (!matches) return Number.MAX_VALUE;
+ if (!matches) {
+ return Number.MAX_VALUE;
+ }
return matches[1] * 60 + matches[2];
},
type: 'numeric'
});
$.tablesorter.addParser({
+ id: 'epochdate',
+ is: function(s) { return false; },
+ format: function(s, t, c) {
+ /* TODO: this assumes our magic class is the only one */
+ var epoch = $(c).attr('class');
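+            /* cells are expected to carry a class like 'epoch-1322784000',
+             * i.e. 'epoch-' plus seconds since the epoch (made-up value) */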
+ if (!epoch.indexOf('epoch-') == 0) {
+            if (epoch.indexOf('epoch-') !== 0) {
+ }
+ return epoch.slice(6);
+ },
+ type: 'numeric'
+ });
+ $.tablesorter.addParser({
id: 'longDateTime',
re: /^(\d{4})-(\d{2})-(\d{2}) ([012]\d):([0-5]\d)(:([0-5]\d))?( (\w+))?$/,
is: function(s) {
@@ -56,9 +75,13 @@ if (typeof $.tablesorter !== 'undefined') {
},
format: function(s) {
var matches = this.re.exec(s);
- if (!matches) return 0;
+ if (!matches) {
+ return 0;
+ }
/* skip group 6, group 7 is optional seconds */
- if (matches[7] == undefined) matches[7] = 0;
+ if (matches[7] === undefined) {
+ matches[7] = 0;
+ }
/* The awesomeness of the JS date constructor. Month needs to be
* between 0-11, because things have to be difficult. */
var date = new Date(matches[1], matches[2] - 1, matches[3],
@@ -75,25 +98,26 @@ if (typeof $.tablesorter !== 'undefined') {
},
format: function(s) {
var matches = this.re.exec(s);
- if (!matches) return 0;
+ if (!matches) {
+ return 0;
+ }
var size = parseFloat(matches[1]);
var suffix = matches[2];
switch(suffix) {
- case 'byte':
- case 'bytes':
- return size;
- case 'KB':
- return size * 1024;
- case 'MB':
- return size * 1024 * 1024;
- case 'GB':
- return size * 1024 * 1024 * 1024;
- case 'TB':
- return size * 1024 * 1024 * 1024 * 1024;
+ /* intentional fall-through at each level */
case 'PB':
- return size * 1024 * 1024 * 1024 * 1024 * 1024;
+ size *= 1024;
+ case 'TB':
+ size *= 1024;
+ case 'GB':
+ size *= 1024;
+ case 'MB':
+ size *= 1024;
+ case 'KB':
+ size *= 1024;
}
+ return size;
},
type: 'numeric'
});
@@ -128,7 +152,7 @@ function ajaxifyFiles() {
/* packages/differences.html */
function filter_packages() {
- // start with all rows, and then remove ones we shouldn't show
+ /* start with all rows, and then remove ones we shouldn't show */
var rows = $('#tbody_differences').children();
var all_rows = rows;
if (!$('#id_multilib').is(':checked')) {
@@ -139,34 +163,42 @@ function filter_packages() {
rows = rows.filter('.' + arch);
}
if (!$('#id_minor').is(':checked')) {
- // this check is done last because it is the most expensive
+ /* this check is done last because it is the most expensive */
var pat = /(.*)-(.+)/;
rows = rows.filter(function(index) {
var cells = $(this).children('td');
- // all this just to get the split version out of the table cell
+ /* all this just to get the split version out of the table cell */
var ver_a = cells.eq(2).find('span').text().match(pat);
- if (!ver_a) return true;
+ if (!ver_a) {
+ return true;
+ }
var ver_b = cells.eq(3).find('span').text().match(pat);
- if (!ver_b) return true;
+ if (!ver_b) {
+ return true;
+ }
- // first check pkgver
- if (ver_a[1] !== ver_b[1]) return true;
- // pkgver matched, so see if rounded pkgrel matches
- if (Math.floor(parseFloat(ver_a[2])) ==
- Math.floor(parseFloat(ver_b[2]))) return false;
- // pkgrel didn't match, so keep the row
+ /* first check pkgver */
+ if (ver_a[1] !== ver_b[1]) {
+ return true;
+ }
+ /* pkgver matched, so see if rounded pkgrel matches */
+ if (Math.floor(parseFloat(ver_a[2])) ===
+ Math.floor(parseFloat(ver_b[2]))) {
+ return false;
+ }
+ /* pkgrel didn't match, so keep the row */
return true;
});
}
- // hide all rows, then show the set we care about
+ /* hide all rows, then show the set we care about */
all_rows.hide();
rows.show();
- // make sure we update the odd/even styling from sorting
+ /* make sure we update the odd/even styling from sorting */
$('.results').trigger('applyWidgets');
}
-function filter_reset() {
+function filter_packages_reset() {
$('#id_archonly').val('both');
$('#id_multilib').removeAttr('checked');
$('#id_minor').removeAttr('checked');
@@ -194,21 +226,98 @@ function todolist_flag() {
function signoff_package() {
var link = this;
$.getJSON(link.href, function(data) {
+ link = $(link);
+ var signoff = null;
+ var cell = link.closest('td');
if (data.created) {
- var signoff = $('<li>').addClass('signed-username').text(data.user);
- $(link).append(signoff);
+ signoff = $('<li>').addClass('signed-username').text(data.user);
+ var list = cell.children('ul.signoff-list');
+ if (list.size() == 0) {
+ list = $('<ul class="signoff-list">').prependTo(cell);
+ }
+ list.append(signoff);
+ } else if(data.user) {
+ signoff = link.closest('td').find('li').filter(function(index) {
+ return $(this).text() == data.user;
+ });
+ }
+ if (signoff && data.revoked) {
+ signoff.text(signoff.text() + ' (revoked)');
}
/* update the approved column to reflect reality */
- if (data.approved) {
- var approved = $(link).closest('tr').children('.signoff-no');
- approved.text('Yes').addClass(
- 'signoff-yes').removeClass('signoff-no');
+ var approved = link.closest('tr').children('.approval');
+ approved.attr('class', '');
+ if (data.known_bad) {
+ approved.text('Bad').addClass('signoff-bad');
+ } else if (!data.enabled) {
+ approved.text('Disabled').addClass('signoff-disabled');
+ } else if (data.approved) {
+ approved.text('Yes').addClass('signoff-yes');
+ } else {
+ approved.text('No').addClass('signoff-no');
+ }
+ link.removeAttr('title');
+ /* Form our new link. The current will be something like
+ * '/packages/repo/arch/package/...' */
+ var base_href = link.attr('href').split('/').slice(0, 5).join('/');
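+        /* e.g. '/packages/testing/i686/foo/signoff/revoke/' becomes
+         * '/packages/testing/i686/foo' (repo/arch/pkg names illustrative) */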
+ if (data.revoked) {
+ link.text('Signoff');
+ link.attr('href', base_href + '/signoff/');
+ /* should we be hiding the link? */
+ if (data.known_bad || !data.enabled) {
+ link.remove();
+ }
} else {
- var approved = $(link).closest('tr').children('.signoff-yes');
- approved.text('No').addClass(
- 'signoff-no').removeClass('signoff-yes');
+ link.text('Revoke Signoff');
+ link.attr('href', base_href + '/signoff/revoke/');
}
$('.results').trigger('updateCell', approved);
});
return false;
}
+
+function filter_signoffs() {
+ /* start with all rows, and then remove ones we shouldn't show */
+ var rows = $('#tbody_signoffs').children();
+ var all_rows = rows;
+ /* apply arch and repo filters */
+ $('#signoffs_filter .arch_filter').add(
+ '#signoffs_filter .repo_filter').each(function() {
+ if (!$(this).is(':checked')) {
+ rows = rows.not('.' + $(this).val());
+ }
+ });
+ /* and then the slightly more expensive pending check */
+ if ($('#id_pending').is(':checked')) {
+ rows = rows.has('td.signoff-no');
+ }
+ /* hide all rows, then show the set we care about */
+ all_rows.hide();
+ rows.show();
+ $('#filter-count').text(rows.length);
+ /* make sure we update the odd/even styling from sorting */
+ $('.results').trigger('applyWidgets');
+}
+function filter_signoffs_reset() {
+ $('#signoffs_filter .arch_filter').attr('checked', 'checked');
+ $('#signoffs_filter .repo_filter').attr('checked', 'checked');
+ $('#id_pending').removeAttr('checked');
+ filter_signoffs();
+}
+
+/* visualizations */
+function format_filesize(size, decimals) {
+ /*var labels = ['B', 'KiB', 'MiB', 'GiB', 'TiB', 'PiB', 'EiB', 'ZiB', 'YiB'];*/
+ var labels = ['B', 'KB', 'MB', 'GB', 'TB', 'PB', 'EB', 'ZB', 'YB'];
+ var label = 0;
+
+ while (size > 2048.0 && label < labels.length - 1) {
+ label++;
+ size /= 1024.0;
+ }
+ if (decimals === undefined) {
+ decimals = 2;
+ }
+
+ return size.toFixed(decimals) + ' ' + labels[label];
+}
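+
+/* e.g. format_filesize(3145728) returns '3.00 MB' */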
diff --git a/media/d3.js b/media/d3.js
new file mode 100644
index 00000000..23edb6b1
--- /dev/null
+++ b/media/d3.js
@@ -0,0 +1,4148 @@
+(function(){if (!Date.now) Date.now = function() {
+ return +new Date;
+};
+try {
+ document.createElement("div").style.setProperty("opacity", 0, "");
+} catch (error) {
+ var d3_style_prototype = CSSStyleDeclaration.prototype,
+ d3_style_setProperty = d3_style_prototype.setProperty;
+ d3_style_prototype.setProperty = function(name, value, priority) {
+ d3_style_setProperty.call(this, name, value + "", priority);
+ };
+}
+d3 = {version: "2.4.3"}; // semver
+var d3_array = d3_arraySlice; // conversion for NodeLists
+
+function d3_arrayCopy(pseudoarray) {
+ var i = -1, n = pseudoarray.length, array = [];
+ while (++i < n) array.push(pseudoarray[i]);
+ return array;
+}
+
+function d3_arraySlice(pseudoarray) {
+ return Array.prototype.slice.call(pseudoarray);
+}
+
+try {
+ d3_array(document.documentElement.childNodes)[0].nodeType;
+} catch(e) {
+ d3_array = d3_arrayCopy;
+}
+
+var d3_arraySubclass = [].__proto__?
+
+// Until ECMAScript supports array subclassing, prototype injection works well.
+function(array, prototype) {
+ array.__proto__ = prototype;
+}:
+
+// And if your browser doesn't support __proto__, we'll use direct extension.
+function(array, prototype) {
+ for (var property in prototype) array[property] = prototype[property];
+};
+function d3_this() {
+ return this;
+}
+d3.functor = function(v) {
+ return typeof v === "function" ? v : function() { return v; };
+};
+// A getter-setter method that preserves the appropriate `this` context.
+d3.rebind = function(object, method) {
+ return function() {
+ var x = method.apply(object, arguments);
+ return arguments.length ? object : x;
+ };
+};
+d3.ascending = function(a, b) {
+ return a < b ? -1 : a > b ? 1 : a >= b ? 0 : NaN;
+};
+d3.descending = function(a, b) {
+ return b < a ? -1 : b > a ? 1 : b >= a ? 0 : NaN;
+};
+d3.mean = function(array, f) {
+ var n = array.length,
+ a,
+ m = 0,
+ i = -1,
+ j = 0;
+ if (arguments.length === 1) {
+ while (++i < n) if (d3_number(a = array[i])) m += (a - m) / ++j;
+ } else {
+ while (++i < n) if (d3_number(a = f.call(array, array[i], i))) m += (a - m) / ++j;
+ }
+ return j ? m : undefined;
+};
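+// The running update m += (a - m) / ++j computes the mean without a large
+// intermediate sum; d3_number skips null/NaN entries entirely.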
+d3.median = function(array, f) {
+ if (arguments.length > 1) array = array.map(f);
+ array = array.filter(d3_number);
+ return array.length ? d3.quantile(array.sort(d3.ascending), .5) : undefined;
+};
+d3.min = function(array, f) {
+ var i = -1,
+ n = array.length,
+ a,
+ b;
+ if (arguments.length === 1) {
+ while (++i < n && ((a = array[i]) == null || a != a)) a = undefined;
+ while (++i < n) if ((b = array[i]) != null && a > b) a = b;
+ } else {
+ while (++i < n && ((a = f.call(array, array[i], i)) == null || a != a)) a = undefined;
+ while (++i < n) if ((b = f.call(array, array[i], i)) != null && a > b) a = b;
+ }
+ return a;
+};
+d3.max = function(array, f) {
+ var i = -1,
+ n = array.length,
+ a,
+ b;
+ if (arguments.length === 1) {
+ while (++i < n && ((a = array[i]) == null || a != a)) a = undefined;
+ while (++i < n) if ((b = array[i]) != null && b > a) a = b;
+ } else {
+ while (++i < n && ((a = f.call(array, array[i], i)) == null || a != a)) a = undefined;
+ while (++i < n) if ((b = f.call(array, array[i], i)) != null && b > a) a = b;
+ }
+ return a;
+};
+function d3_number(x) {
+ return x != null && !isNaN(x);
+}
+d3.sum = function(array, f) {
+ var s = 0,
+ n = array.length,
+ a,
+ i = -1;
+
+ if (arguments.length === 1) {
+ while (++i < n) if (!isNaN(a = +array[i])) s += a;
+ } else {
+ while (++i < n) if (!isNaN(a = +f.call(array, array[i], i))) s += a;
+ }
+
+ return s;
+};
+// R-7 per <http://en.wikipedia.org/wiki/Quantile>
+d3.quantile = function(values, p) {
+ var H = (values.length - 1) * p + 1,
+ h = Math.floor(H),
+ v = values[h - 1],
+ e = H - h;
+ return e ? v + e * (values[h] - v) : v;
+};
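+// e.g. d3.quantile([1, 2, 3, 4], .5) returns 2.5 via linear interpolation.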
+d3.zip = function() {
+ if (!(n = arguments.length)) return [];
+ for (var i = -1, m = d3.min(arguments, d3_zipLength), zips = new Array(m); ++i < m;) {
+ for (var j = -1, n, zip = zips[i] = new Array(n); ++j < n;) {
+ zip[j] = arguments[j][i];
+ }
+ }
+ return zips;
+};
+
+function d3_zipLength(d) {
+ return d.length;
+}
+// Locate the insertion point for x in a to maintain sorted order. The
+// arguments lo and hi may be used to specify a subset of the array which should
+// be considered; by default the entire array is used. If x is already present
+// in a, the insertion point will be before (to the left of) any existing
+// entries. The return value is suitable for use as the first argument to
+// `array.splice` assuming that a is already sorted.
+//
+// The returned insertion point i partitions the array a into two halves so that
+// all v < x for v in a[lo:i] for the left side and all v >= x for v in a[i:hi]
+// for the right side.
+d3.bisectLeft = function(a, x, lo, hi) {
+ if (arguments.length < 3) lo = 0;
+ if (arguments.length < 4) hi = a.length;
+ while (lo < hi) {
+ var mid = (lo + hi) >> 1;
+ if (a[mid] < x) lo = mid + 1;
+ else hi = mid;
+ }
+ return lo;
+};
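+// e.g. d3.bisectLeft([1, 2, 2, 3], 2) returns 1, the leftmost slot for 2.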
+
+// Similar to bisectLeft, but returns an insertion point which comes after (to
+// the right of) any existing entries of x in a.
+//
+// The returned insertion point i partitions the array into two halves so that
+// all v <= x for v in a[lo:i] for the left side and all v > x for v in a[i:hi]
+// for the right side.
+d3.bisect =
+d3.bisectRight = function(a, x, lo, hi) {
+ if (arguments.length < 3) lo = 0;
+ if (arguments.length < 4) hi = a.length;
+ while (lo < hi) {
+ var mid = (lo + hi) >> 1;
+ if (x < a[mid]) hi = mid;
+ else lo = mid + 1;
+ }
+ return lo;
+};
+d3.first = function(array, f) {
+ var i = 0,
+ n = array.length,
+ a = array[0],
+ b;
+ if (arguments.length === 1) f = d3.ascending;
+ while (++i < n) {
+ if (f.call(array, a, b = array[i]) > 0) {
+ a = b;
+ }
+ }
+ return a;
+};
+d3.last = function(array, f) {
+ var i = 0,
+ n = array.length,
+ a = array[0],
+ b;
+ if (arguments.length === 1) f = d3.ascending;
+ while (++i < n) {
+ if (f.call(array, a, b = array[i]) <= 0) {
+ a = b;
+ }
+ }
+ return a;
+};
+d3.nest = function() {
+ var nest = {},
+ keys = [],
+ sortKeys = [],
+ sortValues,
+ rollup;
+
+ function map(array, depth) {
+ if (depth >= keys.length) return rollup
+ ? rollup.call(nest, array) : (sortValues
+ ? array.sort(sortValues)
+ : array);
+
+ var i = -1,
+ n = array.length,
+ key = keys[depth++],
+ keyValue,
+ object,
+ o = {};
+
+ while (++i < n) {
+ if ((keyValue = key(object = array[i])) in o) {
+ o[keyValue].push(object);
+ } else {
+ o[keyValue] = [object];
+ }
+ }
+
+ for (keyValue in o) {
+ o[keyValue] = map(o[keyValue], depth);
+ }
+
+ return o;
+ }
+
+ function entries(map, depth) {
+ if (depth >= keys.length) return map;
+
+ var a = [],
+ sortKey = sortKeys[depth++],
+ key;
+
+ for (key in map) {
+ a.push({key: key, values: entries(map[key], depth)});
+ }
+
+ if (sortKey) a.sort(function(a, b) {
+ return sortKey(a.key, b.key);
+ });
+
+ return a;
+ }
+
+ nest.map = function(array) {
+ return map(array, 0);
+ };
+
+ nest.entries = function(array) {
+ return entries(map(array, 0), 0);
+ };
+
+ nest.key = function(d) {
+ keys.push(d);
+ return nest;
+ };
+
+ // Specifies the order for the most-recently specified key.
+ // Note: only applies to entries. Map keys are unordered!
+ nest.sortKeys = function(order) {
+ sortKeys[keys.length - 1] = order;
+ return nest;
+ };
+
+ // Specifies the order for leaf values.
+ // Applies to both maps and entries array.
+ nest.sortValues = function(order) {
+ sortValues = order;
+ return nest;
+ };
+
+ nest.rollup = function(f) {
+ rollup = f;
+ return nest;
+ };
+
+ return nest;
+};
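+// e.g. d3.nest().key(function(d) { return d.repo; }).map(rows) groups the
+// rows array by each element's repo property (names illustrative).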
+d3.keys = function(map) {
+ var keys = [];
+ for (var key in map) keys.push(key);
+ return keys;
+};
+d3.values = function(map) {
+ var values = [];
+ for (var key in map) values.push(map[key]);
+ return values;
+};
+d3.entries = function(map) {
+ var entries = [];
+ for (var key in map) entries.push({key: key, value: map[key]});
+ return entries;
+};
+d3.permute = function(array, indexes) {
+ var permutes = [],
+ i = -1,
+ n = indexes.length;
+ while (++i < n) permutes[i] = array[indexes[i]];
+ return permutes;
+};
+d3.merge = function(arrays) {
+ return Array.prototype.concat.apply([], arrays);
+};
+d3.split = function(array, f) {
+ var arrays = [],
+ values = [],
+ value,
+ i = -1,
+ n = array.length;
+ if (arguments.length < 2) f = d3_splitter;
+ while (++i < n) {
+ if (f.call(values, value = array[i], i)) {
+ values = [];
+ } else {
+ if (!values.length) arrays.push(values);
+ values.push(value);
+ }
+ }
+ return arrays;
+};
+
+function d3_splitter(d) {
+ return d == null;
+}
+function d3_collapse(s) {
+ return s.replace(/(^\s+)|(\s+$)/g, "").replace(/\s+/g, " ");
+}
+/**
+ * @param {number} start
+ * @param {number=} stop
+ * @param {number=} step
+ */
+d3.range = function(start, stop, step) {
+ if (arguments.length < 3) {
+ step = 1;
+ if (arguments.length < 2) {
+ stop = start;
+ start = 0;
+ }
+ }
+ if ((stop - start) / step == Infinity) throw new Error("infinite range");
+ var range = [],
+ i = -1,
+ j;
+ if (step < 0) while ((j = start + step * ++i) > stop) range.push(j);
+ else while ((j = start + step * ++i) < stop) range.push(j);
+ return range;
+};
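+// e.g. d3.range(0, 1, .25) returns [0, 0.25, 0.5, 0.75]; stop is exclusive.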
+d3.requote = function(s) {
+ return s.replace(d3_requote_re, "\\$&");
+};
+
+var d3_requote_re = /[\\\^\$\*\+\?\|\[\]\(\)\.\{\}]/g;
+d3.round = function(x, n) {
+ return n
+ ? Math.round(x * Math.pow(10, n)) * Math.pow(10, -n)
+ : Math.round(x);
+};
+d3.xhr = function(url, mime, callback) {
+ var req = new XMLHttpRequest;
+ if (arguments.length < 3) callback = mime;
+ else if (mime && req.overrideMimeType) req.overrideMimeType(mime);
+ req.open("GET", url, true);
+ req.onreadystatechange = function() {
+ if (req.readyState === 4) callback(req.status < 300 ? req : null);
+ };
+ req.send(null);
+};
+d3.text = function(url, mime, callback) {
+ function ready(req) {
+ callback(req && req.responseText);
+ }
+ if (arguments.length < 3) {
+ callback = mime;
+ mime = null;
+ }
+ d3.xhr(url, mime, ready);
+};
+d3.json = function(url, callback) {
+ d3.text(url, "application/json", function(text) {
+ callback(text ? JSON.parse(text) : null);
+ });
+};
+d3.html = function(url, callback) {
+ d3.text(url, "text/html", function(text) {
+ if (text != null) { // Treat empty string as valid HTML.
+ var range = document.createRange();
+ range.selectNode(document.body);
+ text = range.createContextualFragment(text);
+ }
+ callback(text);
+ });
+};
+d3.xml = function(url, mime, callback) {
+ function ready(req) {
+ callback(req && req.responseXML);
+ }
+ if (arguments.length < 3) {
+ callback = mime;
+ mime = null;
+ }
+ d3.xhr(url, mime, ready);
+};
+d3.ns = {
+
+ prefix: {
+ svg: "http://www.w3.org/2000/svg",
+ xhtml: "http://www.w3.org/1999/xhtml",
+ xlink: "http://www.w3.org/1999/xlink",
+ xml: "http://www.w3.org/XML/1998/namespace",
+ xmlns: "http://www.w3.org/2000/xmlns/"
+ },
+
+ qualify: function(name) {
+ var i = name.indexOf(":");
+ return i < 0 ? name : {
+ space: d3.ns.prefix[name.substring(0, i)],
+ local: name.substring(i + 1)
+ };
+ }
+
+};
+/** @param {...string} types */
+d3.dispatch = function(types) {
+ var dispatch = {},
+ type;
+ for (var i = 0, n = arguments.length; i < n; i++) {
+ type = arguments[i];
+ dispatch[type] = d3_dispatch(type);
+ }
+ return dispatch;
+};
+
+function d3_dispatch(type) {
+ var dispatch = {},
+ listeners = [];
+
+ dispatch.add = function(listener) {
+ for (var i = 0; i < listeners.length; i++) {
+ if (listeners[i].listener == listener) return dispatch; // already registered
+ }
+ listeners.push({listener: listener, on: true});
+ return dispatch;
+ };
+
+ dispatch.remove = function(listener) {
+ for (var i = 0; i < listeners.length; i++) {
+ var l = listeners[i];
+ if (l.listener == listener) {
+ l.on = false;
+ listeners = listeners.slice(0, i).concat(listeners.slice(i + 1));
+ break;
+ }
+ }
+ return dispatch;
+ };
+
+ dispatch.dispatch = function() {
+ var ls = listeners; // defensive reference
+ for (var i = 0, n = ls.length; i < n; i++) {
+ var l = ls[i];
+ if (l.on) l.listener.apply(this, arguments);
+ }
+ };
+
+ return dispatch;
+};
+// TODO align
+d3.format = function(specifier) {
+ var match = d3_format_re.exec(specifier),
+ fill = match[1] || " ",
+ sign = match[3] || "",
+ zfill = match[5],
+ width = +match[6],
+ comma = match[7],
+ precision = match[8],
+ type = match[9],
+ scale = 1,
+ suffix = "",
+ integer = false;
+
+ if (precision) precision = +precision.substring(1);
+
+ if (zfill) {
+ fill = "0"; // TODO align = "=";
+ if (comma) width -= Math.floor((width - 1) / 4);
+ }
+
+ switch (type) {
+ case "n": comma = true; type = "g"; break;
+ case "%": scale = 100; suffix = "%"; type = "f"; break;
+ case "p": scale = 100; suffix = "%"; type = "r"; break;
+ case "d": integer = true; precision = 0; break;
+ case "s": scale = -1; type = "r"; break;
+ }
+
+ // If no precision is specified for r, fallback to general notation.
+ if (type == "r" && !precision) type = "g";
+
+ type = d3_format_types[type] || d3_format_typeDefault;
+
+ return function(value) {
+
+ // Return the empty string for floats formatted as ints.
+ if (integer && (value % 1)) return "";
+
+ // Convert negative to positive, and record the sign prefix.
+ var negative = (value < 0) && (value = -value) ? "\u2212" : sign;
+
+ // Apply the scale, computing it from the value's exponent for si format.
+ if (scale < 0) {
+ var prefix = d3.formatPrefix(value, precision);
+ value *= prefix.scale;
+ suffix = prefix.symbol;
+ } else {
+ value *= scale;
+ }
+
+ // Convert to the desired precision.
+ value = type(value, precision);
+
+    // If the fill character is 0, the sign and group are applied after the fill.
+ if (zfill) {
+ var length = value.length + negative.length;
+ if (length < width) value = new Array(width - length + 1).join(fill) + value;
+ if (comma) value = d3_format_group(value);
+ value = negative + value;
+ }
+
+    // Otherwise (e.g., space-filling), the sign and group are applied before.
+ else {
+ if (comma) value = d3_format_group(value);
+ value = negative + value;
+ var length = value.length;
+ if (length < width) value = new Array(width - length + 1).join(fill) + value;
+ }
+
+ return value + suffix;
+ };
+};
+
+// [[fill]align][sign][#][0][width][,][.precision][type]
+var d3_format_re = /(?:([^{])?([<>=^]))?([+\- ])?(#)?(0)?([0-9]+)?(,)?(\.[0-9]+)?([a-zA-Z%])?/;
+
+var d3_format_types = {
+ g: function(x, p) { return x.toPrecision(p); },
+ e: function(x, p) { return x.toExponential(p); },
+ f: function(x, p) { return x.toFixed(p); },
+ r: function(x, p) { return d3.round(x, p = d3_format_precision(x, p)).toFixed(Math.max(0, Math.min(20, p))); }
+};
+
+function d3_format_precision(x, p) {
+ return p - (x ? 1 + Math.floor(Math.log(x + Math.pow(10, 1 + Math.floor(Math.log(x) / Math.LN10) - p)) / Math.LN10) : 1);
+}
+
+function d3_format_typeDefault(x) {
+ return x + "";
+}
+
+// Apply comma grouping for thousands.
+function d3_format_group(value) {
+ var i = value.lastIndexOf("."),
+ f = i >= 0 ? value.substring(i) : (i = value.length, ""),
+ t = [];
+ while (i > 0) t.push(value.substring(i -= 3, i + 3));
+ return t.reverse().join(",") + f;
+}
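+// e.g. d3_format_group("1234567.89") returns "1,234,567.89".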
+var d3_formatPrefixes = ["y","z","a","f","p","n","μ","m","","k","M","G","T","P","E","Z","Y"].map(d3_formatPrefix);
+
+d3.formatPrefix = function(value, precision) {
+ var i = 0;
+ if (value) {
+ if (value < 0) value *= -1;
+ if (precision) value = d3.round(value, d3_format_precision(value, precision));
+ i = 1 + Math.floor(1e-12 + Math.log(value) / Math.LN10);
+ i = Math.max(-24, Math.min(24, Math.floor((i <= 0 ? i + 1 : i - 1) / 3) * 3));
+ }
+ return d3_formatPrefixes[8 + i / 3];
+};
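+// e.g. d3.formatPrefix(1234567) has symbol "M" and scale 1e-6.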
+
+function d3_formatPrefix(d, i) {
+ return {
+ scale: Math.pow(10, (8 - i) * 3),
+ symbol: d
+ };
+}
+
+/*
+ * TERMS OF USE - EASING EQUATIONS
+ *
+ * Open source under the BSD License.
+ *
+ * Copyright 2001 Robert Penner
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * - Redistributions of source code must retain the above copyright notice, this
+ * list of conditions and the following disclaimer.
+ *
+ * - Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ *
+ * - Neither the name of the author nor the names of contributors may be used to
+ * endorse or promote products derived from this software without specific
+ * prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+ * POSSIBILITY OF SUCH DAMAGE.
+ */
+
+var d3_ease_quad = d3_ease_poly(2),
+ d3_ease_cubic = d3_ease_poly(3);
+
+var d3_ease = {
+ linear: function() { return d3_ease_linear; },
+ poly: d3_ease_poly,
+ quad: function() { return d3_ease_quad; },
+ cubic: function() { return d3_ease_cubic; },
+ sin: function() { return d3_ease_sin; },
+ exp: function() { return d3_ease_exp; },
+ circle: function() { return d3_ease_circle; },
+ elastic: d3_ease_elastic,
+ back: d3_ease_back,
+ bounce: function() { return d3_ease_bounce; }
+};
+
+var d3_ease_mode = {
+ "in": function(f) { return f; },
+ "out": d3_ease_reverse,
+ "in-out": d3_ease_reflect,
+ "out-in": function(f) { return d3_ease_reflect(d3_ease_reverse(f)); }
+};
+
+d3.ease = function(name) {
+ var i = name.indexOf("-"),
+ t = i >= 0 ? name.substring(0, i) : name,
+ m = i >= 0 ? name.substring(i + 1) : "in";
+ return d3_ease_clamp(d3_ease_mode[m](d3_ease[t].apply(null, Array.prototype.slice.call(arguments, 1))));
+};
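+// e.g. d3.ease("cubic-in-out") reflects the cubic curve around t = 0.5.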
+
+function d3_ease_clamp(f) {
+ return function(t) {
+ return t <= 0 ? 0 : t >= 1 ? 1 : f(t);
+ };
+}
+
+function d3_ease_reverse(f) {
+ return function(t) {
+ return 1 - f(1 - t);
+ };
+}
+
+function d3_ease_reflect(f) {
+ return function(t) {
+ return .5 * (t < .5 ? f(2 * t) : (2 - f(2 - 2 * t)));
+ };
+}
+
+function d3_ease_linear(t) {
+ return t;
+}
+
+function d3_ease_poly(e) {
+ return function(t) {
+ return Math.pow(t, e);
+ }
+}
+
+function d3_ease_sin(t) {
+ return 1 - Math.cos(t * Math.PI / 2);
+}
+
+function d3_ease_exp(t) {
+ return Math.pow(2, 10 * (t - 1));
+}
+
+function d3_ease_circle(t) {
+ return 1 - Math.sqrt(1 - t * t);
+}
+
+function d3_ease_elastic(a, p) {
+ var s;
+ if (arguments.length < 2) p = 0.45;
+ if (arguments.length < 1) { a = 1; s = p / 4; }
+ else s = p / (2 * Math.PI) * Math.asin(1 / a);
+ return function(t) {
+ return 1 + a * Math.pow(2, 10 * -t) * Math.sin((t - s) * 2 * Math.PI / p);
+ };
+}
+
+function d3_ease_back(s) {
+ if (!s) s = 1.70158;
+ return function(t) {
+ return t * t * ((s + 1) * t - s);
+ };
+}
+
+function d3_ease_bounce(t) {
+ return t < 1 / 2.75 ? 7.5625 * t * t
+ : t < 2 / 2.75 ? 7.5625 * (t -= 1.5 / 2.75) * t + .75
+ : t < 2.5 / 2.75 ? 7.5625 * (t -= 2.25 / 2.75) * t + .9375
+ : 7.5625 * (t -= 2.625 / 2.75) * t + .984375;
+}
+d3.event = null;
+d3.interpolate = function(a, b) {
+ var i = d3.interpolators.length, f;
+ while (--i >= 0 && !(f = d3.interpolators[i](a, b)));
+ return f;
+};
+
+d3.interpolateNumber = function(a, b) {
+ b -= a;
+ return function(t) { return a + b * t; };
+};
+
+d3.interpolateRound = function(a, b) {
+ b -= a;
+ return function(t) { return Math.round(a + b * t); };
+};
+
+d3.interpolateString = function(a, b) {
+ var m, // current match
+ i, // current index
+      j, // current index (for coalescing)
+ s0 = 0, // start index of current string prefix
+ s1 = 0, // end index of current string prefix
+ s = [], // string constants and placeholders
+ q = [], // number interpolators
+ n, // q.length
+ o;
+
+ // Reset our regular expression!
+ d3_interpolate_number.lastIndex = 0;
+
+ // Find all numbers in b.
+ for (i = 0; m = d3_interpolate_number.exec(b); ++i) {
+ if (m.index) s.push(b.substring(s0, s1 = m.index));
+ q.push({i: s.length, x: m[0]});
+ s.push(null);
+ s0 = d3_interpolate_number.lastIndex;
+ }
+ if (s0 < b.length) s.push(b.substring(s0));
+
+ // Find all numbers in a.
+ for (i = 0, n = q.length; (m = d3_interpolate_number.exec(a)) && i < n; ++i) {
+ o = q[i];
+    if (o.x == m[0]) { // The numbers match, so coalesce.
+ if (o.i) {
+ if (s[o.i + 1] == null) { // This match is followed by another number.
+ s[o.i - 1] += o.x;
+ s.splice(o.i, 1);
+ for (j = i + 1; j < n; ++j) q[j].i--;
+        } else { // This match is followed by a string, so coalesce twice.
+ s[o.i - 1] += o.x + s[o.i + 1];
+ s.splice(o.i, 2);
+ for (j = i + 1; j < n; ++j) q[j].i -= 2;
+ }
+ } else {
+ if (s[o.i + 1] == null) { // This match is followed by another number.
+ s[o.i] = o.x;
+        } else { // This match is followed by a string, so coalesce twice.
+ s[o.i] = o.x + s[o.i + 1];
+ s.splice(o.i + 1, 1);
+ for (j = i + 1; j < n; ++j) q[j].i--;
+ }
+ }
+ q.splice(i, 1);
+ n--;
+ i--;
+ } else {
+ o.x = d3.interpolateNumber(parseFloat(m[0]), parseFloat(o.x));
+ }
+ }
+
+ // Remove any numbers in b not found in a.
+ while (i < n) {
+ o = q.pop();
+ if (s[o.i + 1] == null) { // This match is followed by another number.
+ s[o.i] = o.x;
+    } else { // This match is followed by a string, so coalesce twice.
+ s[o.i] = o.x + s[o.i + 1];
+ s.splice(o.i + 1, 1);
+ }
+ n--;
+ }
+
+ // Special optimization for only a single match.
+ if (s.length === 1) {
+ return s[0] == null ? q[0].x : function() { return b; };
+ }
+
+ // Otherwise, interpolate each of the numbers and rejoin the string.
+ return function(t) {
+ for (i = 0; i < n; ++i) s[(o = q[i]).i] = o.x(t);
+ return s.join("");
+ };
+};
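+// e.g. d3.interpolateString("0px", "10px")(.5) returns "5px".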
+
+d3.interpolateRgb = function(a, b) {
+ a = d3.rgb(a);
+ b = d3.rgb(b);
+ var ar = a.r,
+ ag = a.g,
+ ab = a.b,
+ br = b.r - ar,
+ bg = b.g - ag,
+ bb = b.b - ab;
+ return function(t) {
+ return "#"
+ + d3_rgb_hex(Math.round(ar + br * t))
+ + d3_rgb_hex(Math.round(ag + bg * t))
+ + d3_rgb_hex(Math.round(ab + bb * t));
+ };
+};
+
+// interpolates HSL space, but outputs RGB string (for compatibility)
+d3.interpolateHsl = function(a, b) {
+ a = d3.hsl(a);
+ b = d3.hsl(b);
+ var h0 = a.h,
+ s0 = a.s,
+ l0 = a.l,
+ h1 = b.h - h0,
+ s1 = b.s - s0,
+ l1 = b.l - l0;
+ return function(t) {
+ return d3_hsl_rgb(h0 + h1 * t, s0 + s1 * t, l0 + l1 * t).toString();
+ };
+};
+
+d3.interpolateArray = function(a, b) {
+ var x = [],
+ c = [],
+ na = a.length,
+ nb = b.length,
+ n0 = Math.min(a.length, b.length),
+ i;
+ for (i = 0; i < n0; ++i) x.push(d3.interpolate(a[i], b[i]));
+ for (; i < na; ++i) c[i] = a[i];
+ for (; i < nb; ++i) c[i] = b[i];
+ return function(t) {
+ for (i = 0; i < n0; ++i) c[i] = x[i](t);
+ return c;
+ };
+};
+
+d3.interpolateObject = function(a, b) {
+ var i = {},
+ c = {},
+ k;
+ for (k in a) {
+ if (k in b) {
+ i[k] = d3_interpolateByName(k)(a[k], b[k]);
+ } else {
+ c[k] = a[k];
+ }
+ }
+ for (k in b) {
+ if (!(k in a)) {
+ c[k] = b[k];
+ }
+ }
+ return function(t) {
+ for (k in i) c[k] = i[k](t);
+ return c;
+ };
+}
+
+var d3_interpolate_number = /[-+]?(?:\d+\.\d+|\d+\.|\.\d+|\d+)(?:[eE][-]?\d+)?/g,
+ d3_interpolate_rgb = {background: 1, fill: 1, stroke: 1};
+
+function d3_interpolateByName(n) {
+ return n in d3_interpolate_rgb || /\bcolor\b/.test(n)
+ ? d3.interpolateRgb
+ : d3.interpolate;
+}
+
+d3.interpolators = [
+ d3.interpolateObject,
+ function(a, b) { return (b instanceof Array) && d3.interpolateArray(a, b); },
+ function(a, b) { return (typeof b === "string") && d3.interpolateString(String(a), b); },
+ function(a, b) { return (typeof b === "string" ? b in d3_rgb_names || /^(#|rgb\(|hsl\()/.test(b) : b instanceof d3_Rgb || b instanceof d3_Hsl) && d3.interpolateRgb(String(a), b); },
+ function(a, b) { return (typeof b === "number") && d3.interpolateNumber(+a, b); }
+];
+function d3_uninterpolateNumber(a, b) {
+ b = b - (a = +a) ? 1 / (b - a) : 0;
+ return function(x) { return (x - a) * b; };
+}
+
+function d3_uninterpolateClamp(a, b) {
+ b = b - (a = +a) ? 1 / (b - a) : 0;
+ return function(x) { return Math.max(0, Math.min(1, (x - a) * b)); };
+}
+d3.rgb = function(r, g, b) {
+ return arguments.length === 1
+ ? (r instanceof d3_Rgb ? d3_rgb(r.r, r.g, r.b)
+ : d3_rgb_parse("" + r, d3_rgb, d3_hsl_rgb))
+ : d3_rgb(~~r, ~~g, ~~b);
+};
+
+function d3_rgb(r, g, b) {
+ return new d3_Rgb(r, g, b);
+}
+
+function d3_Rgb(r, g, b) {
+ this.r = r;
+ this.g = g;
+ this.b = b;
+}
+
+d3_Rgb.prototype.brighter = function(k) {
+ k = Math.pow(0.7, arguments.length ? k : 1);
+ var r = this.r,
+ g = this.g,
+ b = this.b,
+ i = 30;
+ if (!r && !g && !b) return d3_rgb(i, i, i);
+ if (r && r < i) r = i;
+ if (g && g < i) g = i;
+ if (b && b < i) b = i;
+ return d3_rgb(
+ Math.min(255, Math.floor(r / k)),
+ Math.min(255, Math.floor(g / k)),
+ Math.min(255, Math.floor(b / k)));
+};
+
+d3_Rgb.prototype.darker = function(k) {
+ k = Math.pow(0.7, arguments.length ? k : 1);
+ return d3_rgb(
+ Math.floor(k * this.r),
+ Math.floor(k * this.g),
+ Math.floor(k * this.b));
+};
+
+d3_Rgb.prototype.hsl = function() {
+ return d3_rgb_hsl(this.r, this.g, this.b);
+};
+
+d3_Rgb.prototype.toString = function() {
+ return "#" + d3_rgb_hex(this.r) + d3_rgb_hex(this.g) + d3_rgb_hex(this.b);
+};
+
+function d3_rgb_hex(v) {
+ return v < 0x10
+ ? "0" + Math.max(0, v).toString(16)
+ : Math.min(255, v).toString(16);
+}
+
+function d3_rgb_parse(format, rgb, hsl) {
+ var r = 0, // red channel; int in [0, 255]
+ g = 0, // green channel; int in [0, 255]
+ b = 0, // blue channel; int in [0, 255]
+ m1, // CSS color specification match
+ m2, // CSS color specification type (e.g., rgb)
+ name;
+
+ /* Handle hsl, rgb. */
+ m1 = /([a-z]+)\((.*)\)/i.exec(format);
+ if (m1) {
+ m2 = m1[2].split(",");
+ switch (m1[1]) {
+ case "hsl": {
+ return hsl(
+ parseFloat(m2[0]), // degrees
+ parseFloat(m2[1]) / 100, // percentage
+ parseFloat(m2[2]) / 100 // percentage
+ );
+ }
+ case "rgb": {
+ return rgb(
+ d3_rgb_parseNumber(m2[0]),
+ d3_rgb_parseNumber(m2[1]),
+ d3_rgb_parseNumber(m2[2])
+ );
+ }
+ }
+ }
+
+ /* Named colors. */
+ if (name = d3_rgb_names[format]) return rgb(name.r, name.g, name.b);
+
+ /* Hexadecimal colors: #rgb and #rrggbb. */
+ if (format != null && format.charAt(0) === "#") {
+ if (format.length === 4) {
+ r = format.charAt(1); r += r;
+ g = format.charAt(2); g += g;
+ b = format.charAt(3); b += b;
+ } else if (format.length === 7) {
+ r = format.substring(1, 3);
+ g = format.substring(3, 5);
+ b = format.substring(5, 7);
+ }
+ r = parseInt(r, 16);
+ g = parseInt(g, 16);
+ b = parseInt(b, 16);
+ }
+
+ return rgb(r, g, b);
+}
+
+function d3_rgb_hsl(r, g, b) {
+ var min = Math.min(r /= 255, g /= 255, b /= 255),
+ max = Math.max(r, g, b),
+ d = max - min,
+ h,
+ s,
+ l = (max + min) / 2;
+ if (d) {
+ s = l < .5 ? d / (max + min) : d / (2 - max - min);
+ if (r == max) h = (g - b) / d + (g < b ? 6 : 0);
+ else if (g == max) h = (b - r) / d + 2;
+ else h = (r - g) / d + 4;
+ h *= 60;
+ } else {
+ s = h = 0;
+ }
+ return d3_hsl(h, s, l);
+}
+
+function d3_rgb_parseNumber(c) { // either integer or percentage
+ var f = parseFloat(c);
+ return c.charAt(c.length - 1) === "%" ? Math.round(f * 2.55) : f;
+}
+
+var d3_rgb_names = {
+ aliceblue: "#f0f8ff",
+ antiquewhite: "#faebd7",
+ aqua: "#00ffff",
+ aquamarine: "#7fffd4",
+ azure: "#f0ffff",
+ beige: "#f5f5dc",
+ bisque: "#ffe4c4",
+ black: "#000000",
+ blanchedalmond: "#ffebcd",
+ blue: "#0000ff",
+ blueviolet: "#8a2be2",
+ brown: "#a52a2a",
+ burlywood: "#deb887",
+ cadetblue: "#5f9ea0",
+ chartreuse: "#7fff00",
+ chocolate: "#d2691e",
+ coral: "#ff7f50",
+ cornflowerblue: "#6495ed",
+ cornsilk: "#fff8dc",
+ crimson: "#dc143c",
+ cyan: "#00ffff",
+ darkblue: "#00008b",
+ darkcyan: "#008b8b",
+ darkgoldenrod: "#b8860b",
+ darkgray: "#a9a9a9",
+ darkgreen: "#006400",
+ darkgrey: "#a9a9a9",
+ darkkhaki: "#bdb76b",
+ darkmagenta: "#8b008b",
+ darkolivegreen: "#556b2f",
+ darkorange: "#ff8c00",
+ darkorchid: "#9932cc",
+ darkred: "#8b0000",
+ darksalmon: "#e9967a",
+ darkseagreen: "#8fbc8f",
+ darkslateblue: "#483d8b",
+ darkslategray: "#2f4f4f",
+ darkslategrey: "#2f4f4f",
+ darkturquoise: "#00ced1",
+ darkviolet: "#9400d3",
+ deeppink: "#ff1493",
+ deepskyblue: "#00bfff",
+ dimgray: "#696969",
+ dimgrey: "#696969",
+ dodgerblue: "#1e90ff",
+ firebrick: "#b22222",
+ floralwhite: "#fffaf0",
+ forestgreen: "#228b22",
+ fuchsia: "#ff00ff",
+ gainsboro: "#dcdcdc",
+ ghostwhite: "#f8f8ff",
+ gold: "#ffd700",
+ goldenrod: "#daa520",
+ gray: "#808080",
+ green: "#008000",
+ greenyellow: "#adff2f",
+ grey: "#808080",
+ honeydew: "#f0fff0",
+ hotpink: "#ff69b4",
+ indianred: "#cd5c5c",
+ indigo: "#4b0082",
+ ivory: "#fffff0",
+ khaki: "#f0e68c",
+ lavender: "#e6e6fa",
+ lavenderblush: "#fff0f5",
+ lawngreen: "#7cfc00",
+ lemonchiffon: "#fffacd",
+ lightblue: "#add8e6",
+ lightcoral: "#f08080",
+ lightcyan: "#e0ffff",
+ lightgoldenrodyellow: "#fafad2",
+ lightgray: "#d3d3d3",
+ lightgreen: "#90ee90",
+ lightgrey: "#d3d3d3",
+ lightpink: "#ffb6c1",
+ lightsalmon: "#ffa07a",
+ lightseagreen: "#20b2aa",
+ lightskyblue: "#87cefa",
+ lightslategray: "#778899",
+ lightslategrey: "#778899",
+ lightsteelblue: "#b0c4de",
+ lightyellow: "#ffffe0",
+ lime: "#00ff00",
+ limegreen: "#32cd32",
+ linen: "#faf0e6",
+ magenta: "#ff00ff",
+ maroon: "#800000",
+ mediumaquamarine: "#66cdaa",
+ mediumblue: "#0000cd",
+ mediumorchid: "#ba55d3",
+ mediumpurple: "#9370db",
+ mediumseagreen: "#3cb371",
+ mediumslateblue: "#7b68ee",
+ mediumspringgreen: "#00fa9a",
+ mediumturquoise: "#48d1cc",
+ mediumvioletred: "#c71585",
+ midnightblue: "#191970",
+ mintcream: "#f5fffa",
+ mistyrose: "#ffe4e1",
+ moccasin: "#ffe4b5",
+ navajowhite: "#ffdead",
+ navy: "#000080",
+ oldlace: "#fdf5e6",
+ olive: "#808000",
+ olivedrab: "#6b8e23",
+ orange: "#ffa500",
+ orangered: "#ff4500",
+ orchid: "#da70d6",
+ palegoldenrod: "#eee8aa",
+ palegreen: "#98fb98",
+ paleturquoise: "#afeeee",
+ palevioletred: "#db7093",
+ papayawhip: "#ffefd5",
+ peachpuff: "#ffdab9",
+ peru: "#cd853f",
+ pink: "#ffc0cb",
+ plum: "#dda0dd",
+ powderblue: "#b0e0e6",
+ purple: "#800080",
+ red: "#ff0000",
+ rosybrown: "#bc8f8f",
+ royalblue: "#4169e1",
+ saddlebrown: "#8b4513",
+ salmon: "#fa8072",
+ sandybrown: "#f4a460",
+ seagreen: "#2e8b57",
+ seashell: "#fff5ee",
+ sienna: "#a0522d",
+ silver: "#c0c0c0",
+ skyblue: "#87ceeb",
+ slateblue: "#6a5acd",
+ slategray: "#708090",
+ slategrey: "#708090",
+ snow: "#fffafa",
+ springgreen: "#00ff7f",
+ steelblue: "#4682b4",
+ tan: "#d2b48c",
+ teal: "#008080",
+ thistle: "#d8bfd8",
+ tomato: "#ff6347",
+ turquoise: "#40e0d0",
+ violet: "#ee82ee",
+ wheat: "#f5deb3",
+ white: "#ffffff",
+ whitesmoke: "#f5f5f5",
+ yellow: "#ffff00",
+ yellowgreen: "#9acd32"
+};
+
+for (var d3_rgb_name in d3_rgb_names) {
+ d3_rgb_names[d3_rgb_name] = d3_rgb_parse(
+ d3_rgb_names[d3_rgb_name],
+ d3_rgb,
+ d3_hsl_rgb);
+}
+d3.hsl = function(h, s, l) {
+ return arguments.length === 1
+ ? (h instanceof d3_Hsl ? d3_hsl(h.h, h.s, h.l)
+ : d3_rgb_parse("" + h, d3_rgb_hsl, d3_hsl))
+ : d3_hsl(+h, +s, +l);
+};
+
+function d3_hsl(h, s, l) {
+ return new d3_Hsl(h, s, l);
+}
+
+function d3_Hsl(h, s, l) {
+ this.h = h;
+ this.s = s;
+ this.l = l;
+}
+
+d3_Hsl.prototype.brighter = function(k) {
+ k = Math.pow(0.7, arguments.length ? k : 1);
+ return d3_hsl(this.h, this.s, this.l / k);
+};
+
+d3_Hsl.prototype.darker = function(k) {
+ k = Math.pow(0.7, arguments.length ? k : 1);
+ return d3_hsl(this.h, this.s, k * this.l);
+};
+
+d3_Hsl.prototype.rgb = function() {
+ return d3_hsl_rgb(this.h, this.s, this.l);
+};
+
+d3_Hsl.prototype.toString = function() {
+ return this.rgb().toString();
+};
+
+function d3_hsl_rgb(h, s, l) {
+ var m1,
+ m2;
+
+ /* Some simple corrections for h, s and l. */
+ h = h % 360; if (h < 0) h += 360;
+ s = s < 0 ? 0 : s > 1 ? 1 : s;
+ l = l < 0 ? 0 : l > 1 ? 1 : l;
+
+ /* From FvD 13.37, CSS Color Module Level 3 */
+ m2 = l <= .5 ? l * (1 + s) : l + s - l * s;
+ m1 = 2 * l - m2;
+
+ function v(h) {
+ if (h > 360) h -= 360;
+ else if (h < 0) h += 360;
+ if (h < 60) return m1 + (m2 - m1) * h / 60;
+ if (h < 180) return m2;
+ if (h < 240) return m1 + (m2 - m1) * (240 - h) / 60;
+ return m1;
+ }
+
+ function vv(h) {
+ return Math.round(v(h) * 255);
+ }
+
+ return d3_rgb(vv(h + 120), vv(h), vv(h - 120));
+}
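+
+// Illustrative usage sketch (editor's addition, not part of the library):
+// d3.hsl accepts either explicit components or a color string, and converts
+// to RGB on demand through d3_hsl_rgb.
+//   var c = d3.hsl(120, .5, .5); // a mid-tone green
+//   c.brighter();                // divides lightness by 0.7
+//   c.rgb();                     // the equivalent RGB color
+//   d3.hsl("#ff0000");           // parses a string via d3_rgb_parse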
+function d3_selection(groups) {
+ d3_arraySubclass(groups, d3_selectionPrototype);
+ return groups;
+}
+
+var d3_select = function(s, n) { return n.querySelector(s); },
+ d3_selectAll = function(s, n) { return n.querySelectorAll(s); };
+
+// Prefer Sizzle, if available.
+if (typeof Sizzle === "function") {
+ d3_select = function(s, n) { return Sizzle(s, n)[0]; };
+ d3_selectAll = function(s, n) { return Sizzle.uniqueSort(Sizzle(s, n)); };
+}
+
+var d3_selectionPrototype = [];
+
+d3.selection = function() {
+ return d3_selectionRoot;
+};
+
+d3.selection.prototype = d3_selectionPrototype;
+d3_selectionPrototype.select = function(selector) {
+ var subgroups = [],
+ subgroup,
+ subnode,
+ group,
+ node;
+
+ if (typeof selector !== "function") selector = d3_selection_selector(selector);
+
+ for (var j = -1, m = this.length; ++j < m;) {
+ subgroups.push(subgroup = []);
+ subgroup.parentNode = (group = this[j]).parentNode;
+ for (var i = -1, n = group.length; ++i < n;) {
+ if (node = group[i]) {
+ subgroup.push(subnode = selector.call(node, node.__data__, i));
+ if (subnode && "__data__" in node) subnode.__data__ = node.__data__;
+ } else {
+ subgroup.push(null);
+ }
+ }
+ }
+
+ return d3_selection(subgroups);
+};
+
+function d3_selection_selector(selector) {
+ return function() {
+ return d3_select(selector, this);
+ };
+}
+d3_selectionPrototype.selectAll = function(selector) {
+ var subgroups = [],
+ subgroup,
+ node;
+
+ if (typeof selector !== "function") selector = d3_selection_selectorAll(selector);
+
+ for (var j = -1, m = this.length; ++j < m;) {
+ for (var group = this[j], i = -1, n = group.length; ++i < n;) {
+ if (node = group[i]) {
+ subgroups.push(subgroup = d3_array(selector.call(node, node.__data__, i)));
+ subgroup.parentNode = node;
+ }
+ }
+ }
+
+ return d3_selection(subgroups);
+};
+
+function d3_selection_selectorAll(selector) {
+ return function() {
+ return d3_selectAll(selector, this);
+ };
+}
+d3_selectionPrototype.attr = function(name, value) {
+ name = d3.ns.qualify(name);
+
+ // If no value is specified, return the first value.
+ if (arguments.length < 2) {
+ var node = this.node();
+ return name.local
+ ? node.getAttributeNS(name.space, name.local)
+ : node.getAttribute(name);
+ }
+
+ function attrNull() {
+ this.removeAttribute(name);
+ }
+
+ function attrNullNS() {
+ this.removeAttributeNS(name.space, name.local);
+ }
+
+ function attrConstant() {
+ this.setAttribute(name, value);
+ }
+
+ function attrConstantNS() {
+ this.setAttributeNS(name.space, name.local, value);
+ }
+
+ function attrFunction() {
+ var x = value.apply(this, arguments);
+ if (x == null) this.removeAttribute(name);
+ else this.setAttribute(name, x);
+ }
+
+ function attrFunctionNS() {
+ var x = value.apply(this, arguments);
+ if (x == null) this.removeAttributeNS(name.space, name.local);
+ else this.setAttributeNS(name.space, name.local, x);
+ }
+
+ return this.each(value == null
+ ? (name.local ? attrNullNS : attrNull) : (typeof value === "function"
+ ? (name.local ? attrFunctionNS : attrFunction)
+ : (name.local ? attrConstantNS : attrConstant)));
+};
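+
+// Illustrative usage sketch (editor's addition, not part of the library):
+// attr reads from the first node when given one argument, and otherwise
+// sets, computes, or removes the attribute on every node.
+//   d3.select("body").append("svg:svg")
+//       .attr("width", 960)                                      // constant
+//       .attr("class", function(d, i) { return "chart-" + i; })  // computed
+//       .attr("title", null);                                    // removed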
+d3_selectionPrototype.classed = function(name, value) {
+ var names = name.split(d3_selection_classedWhitespace),
+ n = names.length,
+ i = -1;
+ if (arguments.length > 1) {
+ while (++i < n) d3_selection_classed.call(this, names[i], value);
+ return this;
+ } else {
+ while (++i < n) if (!d3_selection_classed.call(this, names[i])) return false;
+ return true;
+ }
+};
+
+var d3_selection_classedWhitespace = /\s+/g;
+
+function d3_selection_classed(name, value) {
+ var re = new RegExp("(^|\\s+)" + d3.requote(name) + "(\\s+|$)", "g");
+
+ // If no value is specified, return the first value.
+ if (arguments.length < 2) {
+ var node = this.node(),
+ c = node.classList;
+ if (c) return c.contains(name);
+ c = node.className;
+ re.lastIndex = 0;
+ return re.test(c.baseVal != null ? c.baseVal : c);
+ }
+
+ function classedAdd() {
+ var c = this.classList;
+ if (c) return c.add(name);
+ c = this.className;
+ var cb = c.baseVal != null,
+ cv = cb ? c.baseVal : c;
+ re.lastIndex = 0;
+ if (!re.test(cv)) {
+ cv = d3_collapse(cv + " " + name);
+ if (cb) c.baseVal = cv;
+ else this.className = cv;
+ }
+ }
+
+ function classedRemove() {
+ var c = this.classList;
+ if (c) return c.remove(name);
+ c = this.className;
+ var cb = c.baseVal != null,
+ cv = cb ? c.baseVal : c;
+ cv = d3_collapse(cv.replace(re, " "));
+ if (cb) c.baseVal = cv;
+ else this.className = cv;
+ }
+
+ function classedFunction() {
+ (value.apply(this, arguments)
+ ? classedAdd
+ : classedRemove).call(this);
+ }
+
+ return this.each(typeof value === "function"
+ ? classedFunction : value
+ ? classedAdd
+ : classedRemove);
+}
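+
+// Illustrative usage sketch (editor's addition, not part of the library):
+// classed toggles one or more whitespace-separated class names per node,
+// using classList when available and falling back to className.
+//   d3.selectAll("li").classed("selected", function(d) { return d.selected; });
+//   d3.select("body").classed("loading", false); // remove the class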
+d3_selectionPrototype.style = function(name, value, priority) {
+ if (arguments.length < 3) priority = "";
+
+ // If no value is specified, return the first value.
+ if (arguments.length < 2) return window
+ .getComputedStyle(this.node(), null)
+ .getPropertyValue(name);
+
+ function styleNull() {
+ this.style.removeProperty(name);
+ }
+
+ function styleConstant() {
+ this.style.setProperty(name, value, priority);
+ }
+
+ function styleFunction() {
+ var x = value.apply(this, arguments);
+ if (x == null) this.style.removeProperty(name);
+ else this.style.setProperty(name, x, priority);
+ }
+
+ return this.each(value == null
+ ? styleNull : (typeof value === "function"
+ ? styleFunction : styleConstant));
+};
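+
+// Illustrative usage sketch (editor's addition, not part of the library):
+// style sets a CSS property from a constant or a function; the optional
+// third argument is the priority, e.g. "important".
+//   d3.selectAll("div").style("background-color",
+//       function(d) { return d > 50 ? "steelblue" : "brown"; });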
+d3_selectionPrototype.property = function(name, value) {
+
+ // If no value is specified, return the first value.
+ if (arguments.length < 2) return this.node()[name];
+
+ function propertyNull() {
+ delete this[name];
+ }
+
+ function propertyConstant() {
+ this[name] = value;
+ }
+
+ function propertyFunction() {
+ var x = value.apply(this, arguments);
+ if (x == null) delete this[name];
+ else this[name] = x;
+ }
+
+ return this.each(value == null
+ ? propertyNull : (typeof value === "function"
+ ? propertyFunction : propertyConstant));
+};
+d3_selectionPrototype.text = function(value) {
+ return arguments.length < 1 ? this.node().textContent
+ : (this.each(typeof value === "function"
+ ? function() { this.textContent = value.apply(this, arguments); }
+ : function() { this.textContent = value; }));
+};
+d3_selectionPrototype.html = function(value) {
+ return arguments.length < 1 ? this.node().innerHTML
+ : (this.each(typeof value === "function"
+ ? function() { this.innerHTML = value.apply(this, arguments); }
+ : function() { this.innerHTML = value; }));
+};
+// TODO append(node)?
+// TODO append(function)?
+d3_selectionPrototype.append = function(name) {
+ name = d3.ns.qualify(name);
+
+ function append() {
+ return this.appendChild(document.createElement(name));
+ }
+
+ function appendNS() {
+ return this.appendChild(document.createElementNS(name.space, name.local));
+ }
+
+ return this.select(name.local ? appendNS : append);
+};
+// TODO insert(node, function)?
+// TODO insert(function, string)?
+// TODO insert(function, function)?
+d3_selectionPrototype.insert = function(name, before) {
+ name = d3.ns.qualify(name);
+
+ function insert() {
+ return this.insertBefore(
+ document.createElement(name),
+ d3_select(before, this));
+ }
+
+ function insertNS() {
+ return this.insertBefore(
+ document.createElementNS(name.space, name.local),
+ d3_select(before, this));
+ }
+
+ return this.select(name.local ? insertNS : insert);
+};
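+
+// Illustrative usage sketch (editor's addition, not part of the library):
+// append and insert take a (possibly namespaced) tag name; insert places the
+// new element before the node matched by the `before` selector in each parent.
+//   d3.select("body").append("svg:svg").append("svg:g");
+//   d3.select("ul").insert("li", ":first-child");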
+// TODO remove(selector)?
+// TODO remove(node)?
+// TODO remove(function)?
+d3_selectionPrototype.remove = function() {
+ return this.each(function() {
+ var parent = this.parentNode;
+ if (parent) parent.removeChild(this);
+ });
+};
+// TODO data(null) for clearing data?
+d3_selectionPrototype.data = function(data, join) {
+ var enter = [],
+ update = [],
+ exit = [];
+
+ function bind(group, groupData) {
+ var i,
+ n = group.length,
+ m = groupData.length,
+ n0 = Math.min(n, m),
+ n1 = Math.max(n, m),
+ updateNodes = [],
+ enterNodes = [],
+ exitNodes = [],
+ node,
+ nodeData;
+
+ if (join) {
+ var nodeByKey = {},
+ keys = [],
+ key,
+ j = groupData.length;
+
+ for (i = -1; ++i < n;) {
+ key = join.call(node = group[i], node.__data__, i);
+ if (key in nodeByKey) {
+ exitNodes[j++] = node; // duplicate key
+ } else {
+ nodeByKey[key] = node;
+ }
+ keys.push(key);
+ }
+
+ for (i = -1; ++i < m;) {
+ node = nodeByKey[key = join.call(groupData, nodeData = groupData[i], i)];
+ if (node) {
+ node.__data__ = nodeData;
+ updateNodes[i] = node;
+ enterNodes[i] = exitNodes[i] = null;
+ } else {
+ enterNodes[i] = d3_selection_dataNode(nodeData);
+ updateNodes[i] = exitNodes[i] = null;
+ }
+ delete nodeByKey[key];
+ }
+
+ for (i = -1; ++i < n;) {
+ if (keys[i] in nodeByKey) {
+ exitNodes[i] = group[i];
+ }
+ }
+ } else {
+ for (i = -1; ++i < n0;) {
+ node = group[i];
+ nodeData = groupData[i];
+ if (node) {
+ node.__data__ = nodeData;
+ updateNodes[i] = node;
+ enterNodes[i] = exitNodes[i] = null;
+ } else {
+ enterNodes[i] = d3_selection_dataNode(nodeData);
+ updateNodes[i] = exitNodes[i] = null;
+ }
+ }
+ for (; i < m; ++i) {
+ enterNodes[i] = d3_selection_dataNode(groupData[i]);
+ updateNodes[i] = exitNodes[i] = null;
+ }
+ for (; i < n1; ++i) {
+ exitNodes[i] = group[i];
+ enterNodes[i] = updateNodes[i] = null;
+ }
+ }
+
+ enterNodes.update = updateNodes;
+
+ enterNodes.parentNode
+ = updateNodes.parentNode
+ = exitNodes.parentNode
+ = group.parentNode;
+
+ enter.push(enterNodes);
+ update.push(updateNodes);
+ exit.push(exitNodes);
+ }
+
+ var i = -1,
+ n = this.length,
+ group;
+ if (typeof data === "function") {
+ while (++i < n) {
+ bind(group = this[i], data.call(group, group.parentNode.__data__, i));
+ }
+ } else {
+ while (++i < n) {
+ bind(group = this[i], data);
+ }
+ }
+
+ var selection = d3_selection(update);
+ selection.enter = function() { return d3_selection_enter(enter); };
+ selection.exit = function() { return d3_selection(exit); };
+ return selection;
+};
+
+function d3_selection_dataNode(data) {
+ return {__data__: data};
+}
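+
+// Illustrative usage sketch (editor's addition, not part of the library):
+// data partitions each group into update, enter and exit selections; the
+// optional join function derives a key per node/datum instead of joining by
+// index.
+//   var li = d3.select("ul").selectAll("li")
+//       .data([4, 8, 15], function(d) { return d; });
+//   li.enter().append("li").text(function(d) { return d; });
+//   li.exit().remove();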
+function d3_selection_enter(selection) {
+ d3_arraySubclass(selection, d3_selection_enterPrototype);
+ return selection;
+}
+
+var d3_selection_enterPrototype = [];
+
+d3_selection_enterPrototype.append = d3_selectionPrototype.append;
+d3_selection_enterPrototype.insert = d3_selectionPrototype.insert;
+d3_selection_enterPrototype.empty = d3_selectionPrototype.empty;
+d3_selection_enterPrototype.select = function(selector) {
+ var subgroups = [],
+ subgroup,
+ subnode,
+ upgroup,
+ group,
+ node;
+
+ for (var j = -1, m = this.length; ++j < m;) {
+ upgroup = (group = this[j]).update;
+ subgroups.push(subgroup = []);
+ subgroup.parentNode = group.parentNode;
+ for (var i = -1, n = group.length; ++i < n;) {
+ if (node = group[i]) {
+ subgroup.push(upgroup[i] = subnode = selector.call(group.parentNode, node.__data__, i));
+ subnode.__data__ = node.__data__;
+ } else {
+ subgroup.push(null);
+ }
+ }
+ }
+
+ return d3_selection(subgroups);
+};
+// TODO preserve null elements to maintain index?
+d3_selectionPrototype.filter = function(filter) {
+ var subgroups = [],
+ subgroup,
+ group,
+ node;
+
+ for (var j = 0, m = this.length; j < m; j++) {
+ subgroups.push(subgroup = []);
+ subgroup.parentNode = (group = this[j]).parentNode;
+ for (var i = 0, n = group.length; i < n; i++) {
+ if ((node = group[i]) && filter.call(node, node.__data__, i)) {
+ subgroup.push(node);
+ }
+ }
+ }
+
+ return d3_selection(subgroups);
+};
+d3_selectionPrototype.map = function(map) {
+ return this.each(function() {
+ this.__data__ = map.apply(this, arguments);
+ });
+};
+d3_selectionPrototype.sort = function(comparator) {
+ comparator = d3_selection_sortComparator.apply(this, arguments);
+ for (var j = 0, m = this.length; j < m; j++) {
+ for (var group = this[j].sort(comparator), i = 1, n = group.length, prev = group[0]; i < n; i++) {
+ var node = group[i];
+ if (node) {
+ if (prev) prev.parentNode.insertBefore(node, prev.nextSibling);
+ prev = node;
+ }
+ }
+ }
+ return this;
+};
+
+function d3_selection_sortComparator(comparator) {
+ if (!arguments.length) comparator = d3.ascending;
+ return function(a, b) {
+ return comparator(a && a.__data__, b && b.__data__);
+ };
+}
+// type can be namespaced, e.g., "click.foo"
+// listener can be null for removal
+d3_selectionPrototype.on = function(type, listener, capture) {
+ if (arguments.length < 3) capture = false;
+
+ // parse the type specifier
+ var name = "__on" + type, i = type.indexOf(".");
+ if (i > 0) type = type.substring(0, i);
+
+ // if called with only one argument, return the current listener
+ if (arguments.length < 2) return (i = this.node()[name]) && i._;
+
+ // remove the old event listener, and add the new event listener
+ return this.each(function(d, i) {
+ var node = this;
+
+ if (node[name]) node.removeEventListener(type, node[name], capture);
+ if (listener) node.addEventListener(type, node[name] = l, capture);
+
+ // wrapped event listener that preserves i
+ function l(e) {
+ var o = d3.event; // Events can be reentrant (e.g., focus).
+ d3.event = e;
+ try {
+ listener.call(node, node.__data__, i);
+ } finally {
+ d3.event = o;
+ }
+ }
+
+ // stash the unwrapped listener for retrieval
+ l._ = listener;
+ });
+};
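+
+// Illustrative usage sketch (editor's addition, not part of the library):
+// the namespace suffix lets independent listeners coexist on one event type,
+// and passing null removes the listener registered under that exact name.
+//   d3.selectAll("rect")
+//       .on("click.tooltip", function(d, i) { console.log(d3.event, d, i); });
+//   d3.selectAll("rect").on("click.tooltip", null); // remove just this one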
+d3_selectionPrototype.each = function(callback) {
+ for (var j = -1, m = this.length; ++j < m;) {
+ for (var group = this[j], i = -1, n = group.length; ++i < n;) {
+ var node = group[i];
+ if (node) callback.call(node, node.__data__, i, j);
+ }
+ }
+ return this;
+};
+//
+// Note: assigning to the arguments array simultaneously changes the value of
+// the corresponding argument!
+//
+// TODO The `this` argument probably shouldn't be the first argument to the
+// callback, anyway, since it's redundant. However, that will require a major
+// version bump due to backwards compatibility, so I'm not changing it right
+// away.
+//
+d3_selectionPrototype.call = function(callback) {
+ callback.apply(this, (arguments[0] = this, arguments));
+ return this;
+};
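+
+// Illustrative usage sketch (editor's addition, not part of the library):
+// call invokes a function once, with the selection as both `this` and the
+// first argument, which is handy for reusable components.
+//   d3.select("svg").call(function(sel) { sel.attr("class", "chart"); });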
+d3_selectionPrototype.empty = function() {
+ return !this.node();
+};
+d3_selectionPrototype.node = function(callback) {
+ for (var j = 0, m = this.length; j < m; j++) {
+ for (var group = this[j], i = 0, n = group.length; i < n; i++) {
+ var node = group[i];
+ if (node) return node;
+ }
+ }
+ return null;
+};
+d3_selectionPrototype.transition = function() {
+ var subgroups = [],
+ subgroup,
+ node;
+
+ for (var j = -1, m = this.length; ++j < m;) {
+ subgroups.push(subgroup = []);
+ for (var group = this[j], i = -1, n = group.length; ++i < n;) {
+ subgroup.push((node = group[i]) ? {node: node, delay: 0, duration: 250} : null);
+ }
+ }
+
+ return d3_transition(subgroups, d3_transitionInheritId || ++d3_transitionId);
+};
+var d3_selectionRoot = d3_selection([[document]]);
+
+d3_selectionRoot[0].parentNode = document.documentElement;
+
+// TODO fast singleton implementation!
+// TODO select(function)
+d3.select = function(selector) {
+ return typeof selector === "string"
+ ? d3_selectionRoot.select(selector)
+ : d3_selection([[selector]]); // assume node
+};
+
+// TODO selectAll(function)
+d3.selectAll = function(selector) {
+ return typeof selector === "string"
+ ? d3_selectionRoot.selectAll(selector)
+ : d3_selection([d3_array(selector)]); // assume node[]
+};
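+
+// Illustrative usage sketch (editor's addition, not part of the library):
+// both accept a selector string resolved against the document, or nodes
+// directly.
+//   d3.select("#chart");                          // by selector
+//   d3.select(document.body);                     // by node
+//   d3.selectAll(document.querySelectorAll("p")); // by node list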
+function d3_transition(groups, id) {
+ d3_arraySubclass(groups, d3_transitionPrototype);
+
+ var tweens = {},
+ event = d3.dispatch("start", "end"),
+ ease = d3_transitionEase,
+ then = Date.now();
+
+ groups.id = id;
+
+ groups.tween = function(name, tween) {
+ if (arguments.length < 2) return tweens[name];
+ if (tween == null) delete tweens[name];
+ else tweens[name] = tween;
+ return groups;
+ };
+
+ groups.ease = function(value) {
+ if (!arguments.length) return ease;
+ ease = typeof value === "function" ? value : d3.ease.apply(d3, arguments);
+ return groups;
+ };
+
+ groups.each = function(type, listener) {
+ if (arguments.length < 2) return d3_transition_each.call(groups, type);
+ event[type].add(listener);
+ return groups;
+ };
+
+ d3.timer(function(elapsed) {
+ groups.each(function(d, i, j) {
+ var tweened = [],
+ node = this,
+ delay = groups[j][i].delay,
+ duration = groups[j][i].duration,
+ lock = node.__transition__ || (node.__transition__ = {active: 0, count: 0});
+
+ ++lock.count;
+
+ delay <= elapsed ? start(elapsed) : d3.timer(start, delay, then);
+
+ function start(elapsed) {
+ if (lock.active > id) return stop();
+ lock.active = id;
+
+ for (var tween in tweens) {
+ if (tween = tweens[tween].call(node, d, i)) {
+ tweened.push(tween);
+ }
+ }
+
+ event.start.dispatch.call(node, d, i);
+ if (!tick(elapsed)) d3.timer(tick, 0, then);
+ return 1;
+ }
+
+ function tick(elapsed) {
+ if (lock.active !== id) return stop();
+
+ var t = (elapsed - delay) / duration,
+ e = ease(t),
+ n = tweened.length;
+
+ while (n > 0) {
+ tweened[--n].call(node, e);
+ }
+
+ if (t >= 1) {
+ stop();
+ d3_transitionInheritId = id;
+ event.end.dispatch.call(node, d, i);
+ d3_transitionInheritId = 0;
+ return 1;
+ }
+ }
+
+ function stop() {
+ if (!--lock.count) delete node.__transition__;
+ return 1;
+ }
+ });
+ return 1;
+ }, 0, then);
+
+ return groups;
+}
+
+var d3_transitionRemove = {};
+
+function d3_transitionNull(d, i, a) {
+ return a != "" && d3_transitionRemove;
+}
+
+function d3_transitionTween(b) {
+
+ function transitionFunction(d, i, a) {
+ var v = b.call(this, d, i);
+ return v == null
+ ? a != "" && d3_transitionRemove
+ : a != v && d3.interpolate(a, v);
+ }
+
+ function transitionString(d, i, a) {
+ return a != b && d3.interpolate(a, b);
+ }
+
+ return typeof b === "function" ? transitionFunction
+ : b == null ? d3_transitionNull
+ : (b += "", transitionString);
+}
+
+var d3_transitionPrototype = [],
+ d3_transitionId = 0,
+ d3_transitionInheritId = 0,
+ d3_transitionEase = d3.ease("cubic-in-out");
+
+d3_transitionPrototype.call = d3_selectionPrototype.call;
+
+d3.transition = function() {
+ return d3_selectionRoot.transition();
+};
+
+d3.transition.prototype = d3_transitionPrototype;
+d3_transitionPrototype.select = function(selector) {
+ var subgroups = [],
+ subgroup,
+ subnode,
+ node;
+
+ if (typeof selector !== "function") selector = d3_selection_selector(selector);
+
+ for (var j = -1, m = this.length; ++j < m;) {
+ subgroups.push(subgroup = []);
+ for (var group = this[j], i = -1, n = group.length; ++i < n;) {
+ if ((node = group[i]) && (subnode = selector.call(node.node, node.node.__data__, i))) {
+ if ("__data__" in node.node) subnode.__data__ = node.node.__data__;
+ subgroup.push({node: subnode, delay: node.delay, duration: node.duration});
+ } else {
+ subgroup.push(null);
+ }
+ }
+ }
+
+ return d3_transition(subgroups, this.id).ease(this.ease());
+};
+d3_transitionPrototype.selectAll = function(selector) {
+ var subgroups = [],
+ subgroup,
+ subnodes,
+ node;
+
+ if (typeof selector !== "function") selector = d3_selection_selectorAll(selector);
+
+ for (var j = -1, m = this.length; ++j < m;) {
+ for (var group = this[j], i = -1, n = group.length; ++i < n;) {
+ if (node = group[i]) {
+ subnodes = selector.call(node.node, node.node.__data__, i);
+ subgroups.push(subgroup = []);
+ for (var k = -1, o = subnodes.length; ++k < o;) {
+ subgroup.push({node: subnodes[k], delay: node.delay, duration: node.duration});
+ }
+ }
+ }
+ }
+
+ return d3_transition(subgroups, this.id).ease(this.ease());
+};
+d3_transitionPrototype.attr = function(name, value) {
+ return this.attrTween(name, d3_transitionTween(value));
+};
+
+d3_transitionPrototype.attrTween = function(nameNS, tween) {
+ var name = d3.ns.qualify(nameNS);
+
+ function attrTween(d, i) {
+ var f = tween.call(this, d, i, this.getAttribute(name));
+ return f === d3_transitionRemove
+ ? (this.removeAttribute(name), null)
+ : f && function(t) { this.setAttribute(name, f(t)); };
+ }
+
+ function attrTweenNS(d, i) {
+ var f = tween.call(this, d, i, this.getAttributeNS(name.space, name.local));
+ return f === d3_transitionRemove
+ ? (this.removeAttributeNS(name.space, name.local), null)
+ : f && function(t) { this.setAttributeNS(name.space, name.local, f(t)); };
+ }
+
+ return this.tween("attr." + nameNS, name.local ? attrTweenNS : attrTween);
+};
+d3_transitionPrototype.style = function(name, value, priority) {
+ if (arguments.length < 3) priority = "";
+ return this.styleTween(name, d3_transitionTween(value), priority);
+};
+
+d3_transitionPrototype.styleTween = function(name, tween, priority) {
+ if (arguments.length < 3) priority = "";
+ return this.tween("style." + name, function(d, i) {
+ var f = tween.call(this, d, i, window.getComputedStyle(this, null).getPropertyValue(name));
+ return f === d3_transitionRemove
+ ? (this.style.removeProperty(name), null)
+ : f && function(t) { this.style.setProperty(name, f(t), priority); };
+ });
+};
+d3_transitionPrototype.text = function(value) {
+ return this.tween("text", function(d, i) {
+ this.textContent = typeof value === "function"
+ ? value.call(this, d, i)
+ : value;
+ });
+};
+d3_transitionPrototype.remove = function() {
+ return this.each("end", function() {
+ var p;
+ if (!this.__transition__ && (p = this.parentNode)) p.removeChild(this);
+ });
+};
+d3_transitionPrototype.delay = function(value) {
+ var groups = this;
+ return groups.each(typeof value === "function"
+ ? function(d, i, j) { groups[j][i].delay = +value.apply(this, arguments); }
+ : (value = +value, function(d, i, j) { groups[j][i].delay = value; }));
+};
+d3_transitionPrototype.duration = function(value) {
+ var groups = this;
+ return groups.each(typeof value === "function"
+ ? function(d, i, j) { groups[j][i].duration = +value.apply(this, arguments); }
+ : (value = +value, function(d, i, j) { groups[j][i].duration = value; }));
+};
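+
+// Illustrative usage sketch (editor's addition, not part of the library):
+// a transition inherits the selected nodes and tweens attributes and styles
+// over time, with per-node delay and duration.
+//   d3.selectAll("circle").transition()
+//       .delay(function(d, i) { return i * 10; })
+//       .duration(500)
+//       .attr("cx", function(d) { return d.x; });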
+function d3_transition_each(callback) {
+ for (var j = 0, m = this.length; j < m; j++) {
+ for (var group = this[j], i = 0, n = group.length; i < n; i++) {
+ var node = group[i];
+ if (node) callback.call(node = node.node, node.__data__, i, j);
+ }
+ }
+ return this;
+}
+d3_transitionPrototype.transition = function() {
+ return this.select(d3_this);
+};
+var d3_timer_queue = null,
+ d3_timer_interval, // is an interval (or frame) active?
+ d3_timer_timeout; // is a timeout active?
+
+// The timer will continue to fire until callback returns true.
+d3.timer = function(callback, delay, then) {
+ var found = false,
+ t0,
+ t1 = d3_timer_queue;
+
+ if (arguments.length < 3) {
+ if (arguments.length < 2) delay = 0;
+ else if (!isFinite(delay)) return;
+ then = Date.now();
+ }
+
+ // See if the callback's already in the queue.
+ while (t1) {
+ if (t1.callback === callback) {
+ t1.then = then;
+ t1.delay = delay;
+ found = true;
+ break;
+ }
+ t0 = t1;
+ t1 = t1.next;
+ }
+
+ // Otherwise, add the callback to the queue.
+ if (!found) d3_timer_queue = {
+ callback: callback,
+ then: then,
+ delay: delay,
+ next: d3_timer_queue
+ };
+
+ // Start animatin'!
+ if (!d3_timer_interval) {
+ d3_timer_timeout = clearTimeout(d3_timer_timeout);
+ d3_timer_interval = 1;
+ d3_timer_frame(d3_timer_step);
+ }
+};
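+
+// Illustrative usage sketch (editor's addition, not part of the library):
+// the callback receives the elapsed time since `then` and is re-invoked
+// every frame until it returns a truthy value.
+//   d3.timer(function(elapsed) {
+//     console.log(elapsed);
+//     return elapsed > 1000; // stop after about a second
+//   });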
+
+function d3_timer_step() {
+ var elapsed,
+ now = Date.now(),
+ t1 = d3_timer_queue;
+
+ while (t1) {
+ elapsed = now - t1.then;
+ if (elapsed >= t1.delay) t1.flush = t1.callback(elapsed);
+ t1 = t1.next;
+ }
+
+ var delay = d3_timer_flush() - now;
+ if (delay > 24) {
+ if (isFinite(delay)) {
+ clearTimeout(d3_timer_timeout);
+ d3_timer_timeout = setTimeout(d3_timer_step, delay);
+ }
+ d3_timer_interval = 0;
+ } else {
+ d3_timer_interval = 1;
+ d3_timer_frame(d3_timer_step);
+ }
+}
+
+d3.timer.flush = function() {
+ var elapsed,
+ now = Date.now(),
+ t1 = d3_timer_queue;
+
+ while (t1) {
+ elapsed = now - t1.then;
+ if (!t1.delay) t1.flush = t1.callback(elapsed);
+ t1 = t1.next;
+ }
+
+ d3_timer_flush();
+};
+
+// Flush after callbacks, to avoid concurrent queue modification.
+function d3_timer_flush() {
+ var t0 = null,
+ t1 = d3_timer_queue,
+ then = Infinity;
+ while (t1) {
+ if (t1.flush) {
+ t1 = t0 ? t0.next = t1.next : d3_timer_queue = t1.next;
+ } else {
+ then = Math.min(then, t1.then + t1.delay);
+ t1 = (t0 = t1).next;
+ }
+ }
+ return then;
+}
+
+var d3_timer_frame = window.requestAnimationFrame
+ || window.webkitRequestAnimationFrame
+ || window.mozRequestAnimationFrame
+ || window.oRequestAnimationFrame
+ || window.msRequestAnimationFrame
+ || function(callback) { setTimeout(callback, 17); };
+function d3_noop() {}
+d3.scale = {};
+
+function d3_scaleExtent(domain) {
+ var start = domain[0], stop = domain[domain.length - 1];
+ return start < stop ? [start, stop] : [stop, start];
+}
+function d3_scale_nice(domain, nice) {
+ var i0 = 0,
+ i1 = domain.length - 1,
+ x0 = domain[i0],
+ x1 = domain[i1],
+ dx;
+
+ if (x1 < x0) {
+ dx = i0; i0 = i1; i1 = dx;
+ dx = x0; x0 = x1; x1 = dx;
+ }
+
+ if (dx = x1 - x0) {
+ nice = nice(dx);
+ domain[i0] = nice.floor(x0);
+ domain[i1] = nice.ceil(x1);
+ }
+
+ return domain;
+}
+
+function d3_scale_niceDefault() {
+ return Math;
+}
+d3.scale.linear = function() {
+ return d3_scale_linear([0, 1], [0, 1], d3.interpolate, false);
+};
+
+function d3_scale_linear(domain, range, interpolate, clamp) {
+ var output,
+ input;
+
+ function rescale() {
+ var linear = domain.length == 2 ? d3_scale_bilinear : d3_scale_polylinear,
+ uninterpolate = clamp ? d3_uninterpolateClamp : d3_uninterpolateNumber;
+ output = linear(domain, range, uninterpolate, interpolate);
+ input = linear(range, domain, uninterpolate, d3.interpolate);
+ return scale;
+ }
+
+ function scale(x) {
+ return output(x);
+ }
+
+ // Note: this requires that the range be coercible to numbers!
+ scale.invert = function(y) {
+ return input(y);
+ };
+
+ scale.domain = function(x) {
+ if (!arguments.length) return domain;
+ domain = x.map(Number);
+ return rescale();
+ };
+
+ scale.range = function(x) {
+ if (!arguments.length) return range;
+ range = x;
+ return rescale();
+ };
+
+ scale.rangeRound = function(x) {
+ return scale.range(x).interpolate(d3.interpolateRound);
+ };
+
+ scale.clamp = function(x) {
+ if (!arguments.length) return clamp;
+ clamp = x;
+ return rescale();
+ };
+
+ scale.interpolate = function(x) {
+ if (!arguments.length) return interpolate;
+ interpolate = x;
+ return rescale();
+ };
+
+ scale.ticks = function(m) {
+ return d3_scale_linearTicks(domain, m);
+ };
+
+ scale.tickFormat = function(m) {
+ return d3_scale_linearTickFormat(domain, m);
+ };
+
+ scale.nice = function() {
+ d3_scale_nice(domain, d3_scale_linearNice);
+ return rescale();
+ };
+
+ scale.copy = function() {
+ return d3_scale_linear(domain, range, interpolate, clamp);
+ };
+
+ return rescale();
+}
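+
+// Illustrative usage sketch (editor's addition, not part of the library):
+//   var x = d3.scale.linear().domain([0, 10]).range([0, 960]);
+//   x(5);          // 480
+//   x.invert(480); // 5
+//   x.ticks(5);    // [0, 2, 4, 6, 8, 10]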
+
+function d3_scale_linearRebind(scale, linear) {
+ scale.range = d3.rebind(scale, linear.range);
+ scale.rangeRound = d3.rebind(scale, linear.rangeRound);
+ scale.interpolate = d3.rebind(scale, linear.interpolate);
+ scale.clamp = d3.rebind(scale, linear.clamp);
+ return scale;
+}
+
+function d3_scale_linearNice(dx) {
+ dx = Math.pow(10, Math.round(Math.log(dx) / Math.LN10) - 1);
+ return {
+ floor: function(x) { return Math.floor(x / dx) * dx; },
+ ceil: function(x) { return Math.ceil(x / dx) * dx; }
+ };
+}
+
+// TODO Dates? Ugh.
+function d3_scale_linearTickRange(domain, m) {
+ var extent = d3_scaleExtent(domain),
+ span = extent[1] - extent[0],
+ step = Math.pow(10, Math.floor(Math.log(span / m) / Math.LN10)),
+ err = m / span * step;
+
+ // Filter ticks to get closer to the desired count.
+ if (err <= .15) step *= 10;
+ else if (err <= .35) step *= 5;
+ else if (err <= .75) step *= 2;
+
+ // Round start and stop values to step interval.
+ extent[0] = Math.ceil(extent[0] / step) * step;
+ extent[1] = Math.floor(extent[1] / step) * step + step * .5; // inclusive
+ extent[2] = step;
+ return extent;
+}
+
+function d3_scale_linearTicks(domain, m) {
+ return d3.range.apply(d3, d3_scale_linearTickRange(domain, m));
+}
+
+function d3_scale_linearTickFormat(domain, m) {
+ return d3.format(",." + Math.max(0, -Math.floor(Math.log(d3_scale_linearTickRange(domain, m)[2]) / Math.LN10 + .01)) + "f");
+}
+function d3_scale_bilinear(domain, range, uninterpolate, interpolate) {
+ var u = uninterpolate(domain[0], domain[1]),
+ i = interpolate(range[0], range[1]);
+ return function(x) {
+ return i(u(x));
+ };
+}
+function d3_scale_polylinear(domain, range, uninterpolate, interpolate) {
+ var u = [],
+ i = [],
+ j = 0,
+ n = domain.length;
+
+ while (++j < n) {
+ u.push(uninterpolate(domain[j - 1], domain[j]));
+ i.push(interpolate(range[j - 1], range[j]));
+ }
+
+ return function(x) {
+ var j = d3.bisect(domain, x, 1, domain.length - 1) - 1;
+ return i[j](u[j](x));
+ };
+}
+d3.scale.log = function() {
+ return d3_scale_log(d3.scale.linear(), d3_scale_logp);
+};
+
+function d3_scale_log(linear, log) {
+ var pow = log.pow;
+
+ function scale(x) {
+ return linear(log(x));
+ }
+
+ scale.invert = function(x) {
+ return pow(linear.invert(x));
+ };
+
+ scale.domain = function(x) {
+ if (!arguments.length) return linear.domain().map(pow);
+ log = x[0] < 0 ? d3_scale_logn : d3_scale_logp;
+ pow = log.pow;
+ linear.domain(x.map(log));
+ return scale;
+ };
+
+ scale.nice = function() {
+ linear.domain(d3_scale_nice(linear.domain(), d3_scale_niceDefault));
+ return scale;
+ };
+
+ scale.ticks = function() {
+ var extent = d3_scaleExtent(linear.domain()),
+ ticks = [];
+ if (extent.every(isFinite)) {
+ var i = Math.floor(extent[0]),
+ j = Math.ceil(extent[1]),
+ u = Math.round(pow(extent[0])),
+ v = Math.round(pow(extent[1]));
+ if (log === d3_scale_logn) {
+ ticks.push(pow(i));
+ for (; i++ < j;) for (var k = 9; k > 0; k--) ticks.push(pow(i) * k);
+ } else {
+ for (; i < j; i++) for (var k = 1; k < 10; k++) ticks.push(pow(i) * k);
+ ticks.push(pow(i));
+ }
+ for (i = 0; ticks[i] < u; i++) {} // strip small values
+ for (j = ticks.length; ticks[j - 1] > v; j--) {} // strip big values
+ ticks = ticks.slice(i, j);
+ }
+ return ticks;
+ };
+
+ scale.tickFormat = function(n, format) {
+ if (arguments.length < 2) format = d3_scale_logFormat;
+ if (arguments.length < 1) return format;
+ var e,
+ k = n / scale.ticks().length,
+ f = log === d3_scale_logn ? (e = -1e-15, Math.floor) : (e = 1e-15, Math.ceil);
+ return function(d) {
+ return d / pow(f(log(d) + e)) < k ? format(d) : "";
+ };
+ };
+
+ scale.copy = function() {
+ return d3_scale_log(linear.copy(), log);
+ };
+
+ return d3_scale_linearRebind(scale, linear);
+}
+
+var d3_scale_logFormat = d3.format("e");
+
+function d3_scale_logp(x) {
+ return Math.log(x) / Math.LN10;
+}
+
+function d3_scale_logn(x) {
+ return -Math.log(-x) / Math.LN10;
+}
+
+d3_scale_logp.pow = function(x) {
+ return Math.pow(10, x);
+};
+
+d3_scale_logn.pow = function(x) {
+ return -Math.pow(10, -x);
+};
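+
+// Illustrative usage sketch (editor's addition, not part of the library):
+// the log scale transforms the domain through base-10 logarithms before
+// delegating to a linear scale; ticks fall on 1-9 times each power of ten.
+//   var y = d3.scale.log().domain([1, 1000]).range([0, 300]);
+//   y(10); // 100, since log(10) is a third of the way to log(1000)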
+d3.scale.pow = function() {
+ return d3_scale_pow(d3.scale.linear(), 1);
+};
+
+function d3_scale_pow(linear, exponent) {
+ var powp = d3_scale_powPow(exponent),
+ powb = d3_scale_powPow(1 / exponent);
+
+ function scale(x) {
+ return linear(powp(x));
+ }
+
+ scale.invert = function(x) {
+ return powb(linear.invert(x));
+ };
+
+ scale.domain = function(x) {
+ if (!arguments.length) return linear.domain().map(powb);
+ linear.domain(x.map(powp));
+ return scale;
+ };
+
+ scale.ticks = function(m) {
+ return d3_scale_linearTicks(scale.domain(), m);
+ };
+
+ scale.tickFormat = function(m) {
+ return d3_scale_linearTickFormat(scale.domain(), m);
+ };
+
+ scale.nice = function() {
+ return scale.domain(d3_scale_nice(scale.domain(), d3_scale_linearNice));
+ };
+
+ scale.exponent = function(x) {
+ if (!arguments.length) return exponent;
+ var domain = scale.domain();
+ powp = d3_scale_powPow(exponent = x);
+ powb = d3_scale_powPow(1 / exponent);
+ return scale.domain(domain);
+ };
+
+ scale.copy = function() {
+ return d3_scale_pow(linear.copy(), exponent);
+ };
+
+ return d3_scale_linearRebind(scale, linear);
+}
+
+function d3_scale_powPow(e) {
+ return function(x) {
+ return x < 0 ? -Math.pow(-x, e) : Math.pow(x, e);
+ };
+}
+d3.scale.sqrt = function() {
+ return d3.scale.pow().exponent(.5);
+};
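+
+// Illustrative usage sketch (editor's addition, not part of the library):
+//   var r = d3.scale.pow().exponent(.5).domain([0, 100]).range([0, 10]);
+//   r(25); // 5, since sqrt(25) is half of sqrt(100)
+// d3.scale.sqrt() above is shorthand for the same exponent.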
+d3.scale.ordinal = function() {
+ return d3_scale_ordinal([], {t: "range", x: []});
+};
+
+function d3_scale_ordinal(domain, ranger) {
+ var index,
+ range,
+ rangeBand;
+
+ function scale(x) {
+ return range[((index[x] || (index[x] = domain.push(x))) - 1) % range.length];
+ }
+
+ scale.domain = function(x) {
+ if (!arguments.length) return domain;
+ domain = [];
+ index = {};
+ var i = -1, n = x.length, xi;
+ while (++i < n) if (!index[xi = x[i]]) index[xi] = domain.push(xi);
+ return scale[ranger.t](ranger.x, ranger.p);
+ };
+
+ scale.range = function(x) {
+ if (!arguments.length) return range;
+ range = x;
+ rangeBand = 0;
+ ranger = {t: "range", x: x};
+ return scale;
+ };
+
+ scale.rangePoints = function(x, padding) {
+ if (arguments.length < 2) padding = 0;
+ var start = x[0],
+ stop = x[1],
+ step = (stop - start) / (domain.length - 1 + padding);
+ range = domain.length < 2 ? [(start + stop) / 2] : d3.range(start + step * padding / 2, stop + step / 2, step);
+ rangeBand = 0;
+ ranger = {t: "rangePoints", x: x, p: padding};
+ return scale;
+ };
+
+ scale.rangeBands = function(x, padding) {
+ if (arguments.length < 2) padding = 0;
+ var start = x[0],
+ stop = x[1],
+ step = (stop - start) / (domain.length + padding);
+ range = d3.range(start + step * padding, stop, step);
+ rangeBand = step * (1 - padding);
+ ranger = {t: "rangeBands", x: x, p: padding};
+ return scale;
+ };
+
+ scale.rangeRoundBands = function(x, padding) {
+ if (arguments.length < 2) padding = 0;
+ var start = x[0],
+ stop = x[1],
+ step = Math.floor((stop - start) / (domain.length + padding)),
+ err = stop - start - (domain.length - padding) * step;
+ range = d3.range(start + Math.round(err / 2), stop, step);
+ rangeBand = Math.round(step * (1 - padding));
+ ranger = {t: "rangeRoundBands", x: x, p: padding};
+ return scale;
+ };
+
+ scale.rangeBand = function() {
+ return rangeBand;
+ };
+
+ scale.copy = function() {
+ return d3_scale_ordinal(domain, ranger);
+ };
+
+ return scale.domain(domain);
+}
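+
+// Illustrative usage sketch (editor's addition, not part of the library):
+// rangeBands divides a continuous interval into one band per domain value.
+//   var x = d3.scale.ordinal()
+//       .domain(["a", "b", "c"])
+//       .rangeBands([0, 120]);
+//   x("b");        // 40
+//   x.rangeBand(); // 40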
+/*
+ * This product includes color specifications and designs developed by Cynthia
+ * Brewer (http://colorbrewer.org/). See lib/colorbrewer for more information.
+ */
+
+d3.scale.category10 = function() {
+ return d3.scale.ordinal().range(d3_category10);
+};
+
+d3.scale.category20 = function() {
+ return d3.scale.ordinal().range(d3_category20);
+};
+
+d3.scale.category20b = function() {
+ return d3.scale.ordinal().range(d3_category20b);
+};
+
+d3.scale.category20c = function() {
+ return d3.scale.ordinal().range(d3_category20c);
+};
+
+var d3_category10 = [
+ "#1f77b4", "#ff7f0e", "#2ca02c", "#d62728", "#9467bd",
+ "#8c564b", "#e377c2", "#7f7f7f", "#bcbd22", "#17becf"
+];
+
+var d3_category20 = [
+ "#1f77b4", "#aec7e8",
+ "#ff7f0e", "#ffbb78",
+ "#2ca02c", "#98df8a",
+ "#d62728", "#ff9896",
+ "#9467bd", "#c5b0d5",
+ "#8c564b", "#c49c94",
+ "#e377c2", "#f7b6d2",
+ "#7f7f7f", "#c7c7c7",
+ "#bcbd22", "#dbdb8d",
+ "#17becf", "#9edae5"
+];
+
+var d3_category20b = [
+ "#393b79", "#5254a3", "#6b6ecf", "#9c9ede",
+ "#637939", "#8ca252", "#b5cf6b", "#cedb9c",
+ "#8c6d31", "#bd9e39", "#e7ba52", "#e7cb94",
+ "#843c39", "#ad494a", "#d6616b", "#e7969c",
+ "#7b4173", "#a55194", "#ce6dbd", "#de9ed6"
+];
+
+var d3_category20c = [
+ "#3182bd", "#6baed6", "#9ecae1", "#c6dbef",
+ "#e6550d", "#fd8d3c", "#fdae6b", "#fdd0a2",
+ "#31a354", "#74c476", "#a1d99b", "#c7e9c0",
+ "#756bb1", "#9e9ac8", "#bcbddc", "#dadaeb",
+ "#636363", "#969696", "#bdbdbd", "#d9d9d9"
+];
+d3.scale.quantile = function() {
+ return d3_scale_quantile([], []);
+};
+
+function d3_scale_quantile(domain, range) {
+ var thresholds;
+
+ function rescale() {
+ var k = 0,
+ n = domain.length,
+ q = range.length;
+ thresholds = [];
+ while (++k < q) thresholds[k - 1] = d3.quantile(domain, k / q);
+ return scale;
+ }
+
+ function scale(x) {
+ if (isNaN(x = +x)) return NaN;
+ return range[d3.bisect(thresholds, x)];
+ }
+
+ scale.domain = function(x) {
+ if (!arguments.length) return domain;
+ domain = x.filter(function(d) { return !isNaN(d); }).sort(d3.ascending);
+ return rescale();
+ };
+
+ scale.range = function(x) {
+ if (!arguments.length) return range;
+ range = x;
+ return rescale();
+ };
+
+ scale.quantiles = function() {
+ return thresholds;
+ };
+
+ scale.copy = function() {
+ return d3_scale_quantile(domain, range); // copy on write!
+ };
+
+ return rescale();
+}
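+
+// Illustrative usage sketch (editor's addition, not part of the library):
+// the domain is a sample of values; the thresholds are the quantile
+// boundaries separating the range's discrete values.
+//   var q = d3.scale.quantile()
+//       .domain([1, 2, 3, 4, 5, 6, 7, 8, 9, 10])
+//       .range(["low", "mid", "high"]);
+//   q(2); // "low"
+//   q(9); // "high"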
+d3.scale.quantize = function() {
+ return d3_scale_quantize(0, 1, [0, 1]);
+};
+
+function d3_scale_quantize(x0, x1, range) {
+ var kx, i;
+
+ function scale(x) {
+ return range[Math.max(0, Math.min(i, Math.floor(kx * (x - x0))))];
+ }
+
+ function rescale() {
+ kx = range.length / (x1 - x0);
+ i = range.length - 1;
+ return scale;
+ }
+
+ scale.domain = function(x) {
+ if (!arguments.length) return [x0, x1];
+ x0 = +x[0];
+ x1 = +x[x.length - 1];
+ return rescale();
+ };
+
+ scale.range = function(x) {
+ if (!arguments.length) return range;
+ range = x;
+ return rescale();
+ };
+
+ scale.copy = function() {
+ return d3_scale_quantize(x0, x1, range); // copy on write
+ };
+
+ return rescale();
+}
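+
+// Illustrative usage sketch (editor's addition, not part of the library):
+// quantize slices a continuous domain into evenly sized bins, one per range
+// value.
+//   var s = d3.scale.quantize().domain([0, 10]).range([1, 2, 3, 4]);
+//   s(2); // 1, first quarter of the domain
+//   s(9); // 4, last quarter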
+d3.svg = {};
+d3.svg.arc = function() {
+ var innerRadius = d3_svg_arcInnerRadius,
+ outerRadius = d3_svg_arcOuterRadius,
+ startAngle = d3_svg_arcStartAngle,
+ endAngle = d3_svg_arcEndAngle;
+
+ function arc() {
+ var r0 = innerRadius.apply(this, arguments),
+ r1 = outerRadius.apply(this, arguments),
+ a0 = startAngle.apply(this, arguments) + d3_svg_arcOffset,
+ a1 = endAngle.apply(this, arguments) + d3_svg_arcOffset,
+ da = (a1 < a0 && (da = a0, a0 = a1, a1 = da), a1 - a0),
+ df = da < Math.PI ? "0" : "1",
+ c0 = Math.cos(a0),
+ s0 = Math.sin(a0),
+ c1 = Math.cos(a1),
+ s1 = Math.sin(a1);
+ return da >= d3_svg_arcMax
+ ? (r0
+ ? "M0," + r1
+ + "A" + r1 + "," + r1 + " 0 1,1 0," + (-r1)
+ + "A" + r1 + "," + r1 + " 0 1,1 0," + r1
+ + "M0," + r0
+ + "A" + r0 + "," + r0 + " 0 1,0 0," + (-r0)
+ + "A" + r0 + "," + r0 + " 0 1,0 0," + r0
+ + "Z"
+ : "M0," + r1
+ + "A" + r1 + "," + r1 + " 0 1,1 0," + (-r1)
+ + "A" + r1 + "," + r1 + " 0 1,1 0," + r1
+ + "Z")
+ : (r0
+ ? "M" + r1 * c0 + "," + r1 * s0
+ + "A" + r1 + "," + r1 + " 0 " + df + ",1 " + r1 * c1 + "," + r1 * s1
+ + "L" + r0 * c1 + "," + r0 * s1
+ + "A" + r0 + "," + r0 + " 0 " + df + ",0 " + r0 * c0 + "," + r0 * s0
+ + "Z"
+ : "M" + r1 * c0 + "," + r1 * s0
+ + "A" + r1 + "," + r1 + " 0 " + df + ",1 " + r1 * c1 + "," + r1 * s1
+ + "L0,0"
+ + "Z");
+ }
+
+ arc.innerRadius = function(v) {
+ if (!arguments.length) return innerRadius;
+ innerRadius = d3.functor(v);
+ return arc;
+ };
+
+ arc.outerRadius = function(v) {
+ if (!arguments.length) return outerRadius;
+ outerRadius = d3.functor(v);
+ return arc;
+ };
+
+ arc.startAngle = function(v) {
+ if (!arguments.length) return startAngle;
+ startAngle = d3.functor(v);
+ return arc;
+ };
+
+ arc.endAngle = function(v) {
+ if (!arguments.length) return endAngle;
+ endAngle = d3.functor(v);
+ return arc;
+ };
+
+ arc.centroid = function() {
+ var r = (innerRadius.apply(this, arguments)
+ + outerRadius.apply(this, arguments)) / 2,
+ a = (startAngle.apply(this, arguments)
+ + endAngle.apply(this, arguments)) / 2 + d3_svg_arcOffset;
+ return [Math.cos(a) * r, Math.sin(a) * r];
+ };
+
+ return arc;
+};
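+
+// Illustrative usage sketch (editor's addition, not part of the library):
+// arc builds an SVG path string from a datum carrying radii and angles;
+// angles are in radians, measured clockwise from twelve o'clock.
+//   var arc = d3.svg.arc();
+//   arc({innerRadius: 40, outerRadius: 100, startAngle: 0, endAngle: Math.PI});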
+
+var d3_svg_arcOffset = -Math.PI / 2,
+ d3_svg_arcMax = 2 * Math.PI - 1e-6;
+
+function d3_svg_arcInnerRadius(d) {
+ return d.innerRadius;
+}
+
+function d3_svg_arcOuterRadius(d) {
+ return d.outerRadius;
+}
+
+function d3_svg_arcStartAngle(d) {
+ return d.startAngle;
+}
+
+function d3_svg_arcEndAngle(d) {
+ return d.endAngle;
+}
+function d3_svg_line(projection) {
+ var x = d3_svg_lineX,
+ y = d3_svg_lineY,
+ interpolate = "linear",
+ interpolator = d3_svg_lineInterpolators[interpolate],
+ tension = .7;
+
+ function line(d) {
+ return d.length < 1 ? null : "M" + interpolator(projection(d3_svg_linePoints(this, d, x, y)), tension);
+ }
+
+ line.x = function(v) {
+ if (!arguments.length) return x;
+ x = v;
+ return line;
+ };
+
+ line.y = function(v) {
+ if (!arguments.length) return y;
+ y = v;
+ return line;
+ };
+
+ line.interpolate = function(v) {
+ if (!arguments.length) return interpolate;
+ interpolator = d3_svg_lineInterpolators[interpolate = v];
+ return line;
+ };
+
+ line.tension = function(v) {
+ if (!arguments.length) return tension;
+ tension = v;
+ return line;
+ };
+
+ return line;
+}
+
+d3.svg.line = function() {
+ return d3_svg_line(Object);
+};
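+
+// Illustrative usage sketch (editor's addition, not part of the library):
+//   var line = d3.svg.line()
+//       .x(function(d) { return d.time; })
+//       .y(function(d) { return d.value; })
+//       .interpolate("basis");
+//   line(data); // an SVG path string, e.g. for path.attr("d", line)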
+
+// Converts the specified array of data into an array of points
+// (x-y tuples), by evaluating the specified `x` and `y` functions on each
+// data point. The `this` context of the evaluated functions is the specified
+// "self" object; each function is passed the current datum and index.
+function d3_svg_linePoints(self, d, x, y) {
+ var points = [],
+ i = -1,
+ n = d.length,
+ fx = typeof x === "function",
+ fy = typeof y === "function",
+ value;
+ if (fx && fy) {
+ while (++i < n) points.push([
+ x.call(self, value = d[i], i),
+ y.call(self, value, i)
+ ]);
+ } else if (fx) {
+ while (++i < n) points.push([x.call(self, d[i], i), y]);
+ } else if (fy) {
+ while (++i < n) points.push([x, y.call(self, d[i], i)]);
+ } else {
+ while (++i < n) points.push([x, y]);
+ }
+ return points;
+}
+
+// The default `x` property, which references d[0].
+function d3_svg_lineX(d) {
+ return d[0];
+}
+
+// The default `y` property, which references d[1].
+function d3_svg_lineY(d) {
+ return d[1];
+}
+
+// The various interpolators supported by the `line` class.
+var d3_svg_lineInterpolators = {
+ "linear": d3_svg_lineLinear,
+ "step-before": d3_svg_lineStepBefore,
+ "step-after": d3_svg_lineStepAfter,
+ "basis": d3_svg_lineBasis,
+ "basis-open": d3_svg_lineBasisOpen,
+ "basis-closed": d3_svg_lineBasisClosed,
+ "bundle": d3_svg_lineBundle,
+ "cardinal": d3_svg_lineCardinal,
+ "cardinal-open": d3_svg_lineCardinalOpen,
+ "cardinal-closed": d3_svg_lineCardinalClosed,
+ "monotone": d3_svg_lineMonotone
+};
+
+// Linear interpolation; generates "L" commands.
+function d3_svg_lineLinear(points) {
+ var i = 0,
+ n = points.length,
+ p = points[0],
+ path = [p[0], ",", p[1]];
+ while (++i < n) path.push("L", (p = points[i])[0], ",", p[1]);
+ return path.join("");
+}
+
+// Step interpolation; generates "H" and "V" commands.
+function d3_svg_lineStepBefore(points) {
+ var i = 0,
+ n = points.length,
+ p = points[0],
+ path = [p[0], ",", p[1]];
+ while (++i < n) path.push("V", (p = points[i])[1], "H", p[0]);
+ return path.join("");
+}
+
+// Step interpolation; generates "H" and "V" commands.
+function d3_svg_lineStepAfter(points) {
+ var i = 0,
+ n = points.length,
+ p = points[0],
+ path = [p[0], ",", p[1]];
+ while (++i < n) path.push("H", (p = points[i])[0], "V", p[1]);
+ return path.join("");
+}
+
+// Open cardinal spline interpolation; generates "C" commands.
+function d3_svg_lineCardinalOpen(points, tension) {
+ return points.length < 4
+ ? d3_svg_lineLinear(points)
+ : points[1] + d3_svg_lineHermite(points.slice(1, points.length - 1),
+ d3_svg_lineCardinalTangents(points, tension));
+}
+
+// Closed cardinal spline interpolation; generates "C" commands.
+function d3_svg_lineCardinalClosed(points, tension) {
+ return points.length < 3
+ ? d3_svg_lineLinear(points)
+ : points[0] + d3_svg_lineHermite((points.push(points[0]), points),
+ d3_svg_lineCardinalTangents([points[points.length - 2]]
+ .concat(points, [points[1]]), tension));
+}
+
+// Cardinal spline interpolation; generates "C" commands.
+function d3_svg_lineCardinal(points, tension, closed) {
+ return points.length < 3
+ ? d3_svg_lineLinear(points)
+ : points[0] + d3_svg_lineHermite(points,
+ d3_svg_lineCardinalTangents(points, tension));
+}
+
+// Hermite spline construction; generates "C" commands.
+function d3_svg_lineHermite(points, tangents) {
+ if (tangents.length < 1
+ || (points.length != tangents.length
+ && points.length != tangents.length + 2)) {
+ return d3_svg_lineLinear(points);
+ }
+
+ var quad = points.length != tangents.length,
+ path = "",
+ p0 = points[0],
+ p = points[1],
+ t0 = tangents[0],
+ t = t0,
+ pi = 1;
+
+ if (quad) {
+ path += "Q" + (p[0] - t0[0] * 2 / 3) + "," + (p[1] - t0[1] * 2 / 3)
+ + "," + p[0] + "," + p[1];
+ p0 = points[1];
+ pi = 2;
+ }
+
+ if (tangents.length > 1) {
+ t = tangents[1];
+ p = points[pi];
+ pi++;
+ path += "C" + (p0[0] + t0[0]) + "," + (p0[1] + t0[1])
+ + "," + (p[0] - t[0]) + "," + (p[1] - t[1])
+ + "," + p[0] + "," + p[1];
+ for (var i = 2; i < tangents.length; i++, pi++) {
+ p = points[pi];
+ t = tangents[i];
+ path += "S" + (p[0] - t[0]) + "," + (p[1] - t[1])
+ + "," + p[0] + "," + p[1];
+ }
+ }
+
+ if (quad) {
+ var lp = points[pi];
+ path += "Q" + (p[0] + t[0] * 2 / 3) + "," + (p[1] + t[1] * 2 / 3)
+ + "," + lp[0] + "," + lp[1];
+ }
+
+ return path;
+}
+
+// Generates tangents for a cardinal spline.
+function d3_svg_lineCardinalTangents(points, tension) {
+ var tangents = [],
+ a = (1 - tension) / 2,
+ p0,
+ p1 = points[0],
+ p2 = points[1],
+ i = 1,
+ n = points.length;
+ while (++i < n) {
+ p0 = p1;
+ p1 = p2;
+ p2 = points[i];
+ tangents.push([a * (p2[0] - p0[0]), a * (p2[1] - p0[1])]);
+ }
+ return tangents;
+}
+
+// B-spline interpolation; generates "C" commands.
+function d3_svg_lineBasis(points) {
+ if (points.length < 3) return d3_svg_lineLinear(points);
+ var i = 1,
+ n = points.length,
+ pi = points[0],
+ x0 = pi[0],
+ y0 = pi[1],
+ px = [x0, x0, x0, (pi = points[1])[0]],
+ py = [y0, y0, y0, pi[1]],
+ path = [x0, ",", y0];
+ d3_svg_lineBasisBezier(path, px, py);
+ while (++i < n) {
+ pi = points[i];
+ px.shift(); px.push(pi[0]);
+ py.shift(); py.push(pi[1]);
+ d3_svg_lineBasisBezier(path, px, py);
+ }
+ i = -1;
+ while (++i < 2) {
+ px.shift(); px.push(pi[0]);
+ py.shift(); py.push(pi[1]);
+ d3_svg_lineBasisBezier(path, px, py);
+ }
+ return path.join("");
+}
+
+// Open B-spline interpolation; generates "C" commands.
+function d3_svg_lineBasisOpen(points) {
+ if (points.length < 4) return d3_svg_lineLinear(points);
+ var path = [],
+ i = -1,
+ n = points.length,
+ pi,
+ px = [0],
+ py = [0];
+ while (++i < 3) {
+ pi = points[i];
+ px.push(pi[0]);
+ py.push(pi[1]);
+ }
+ path.push(d3_svg_lineDot4(d3_svg_lineBasisBezier3, px)
+ + "," + d3_svg_lineDot4(d3_svg_lineBasisBezier3, py));
+ --i; while (++i < n) {
+ pi = points[i];
+ px.shift(); px.push(pi[0]);
+ py.shift(); py.push(pi[1]);
+ d3_svg_lineBasisBezier(path, px, py);
+ }
+ return path.join("");
+}
+
+// Closed B-spline interpolation; generates "C" commands.
+function d3_svg_lineBasisClosed(points) {
+ var path,
+ i = -1,
+ n = points.length,
+ m = n + 4,
+ pi,
+ px = [],
+ py = [];
+ while (++i < 4) {
+ pi = points[i % n];
+ px.push(pi[0]);
+ py.push(pi[1]);
+ }
+ path = [
+ d3_svg_lineDot4(d3_svg_lineBasisBezier3, px), ",",
+ d3_svg_lineDot4(d3_svg_lineBasisBezier3, py)
+ ];
+ --i; while (++i < m) {
+ pi = points[i % n];
+ px.shift(); px.push(pi[0]);
+ py.shift(); py.push(pi[1]);
+ d3_svg_lineBasisBezier(path, px, py);
+ }
+ return path.join("");
+}
+
+function d3_svg_lineBundle(points, tension) {
+ var n = points.length - 1,
+ x0 = points[0][0],
+ y0 = points[0][1],
+ dx = points[n][0] - x0,
+ dy = points[n][1] - y0,
+ i = -1,
+ p,
+ t;
+ while (++i <= n) {
+ p = points[i];
+ t = i / n;
+ p[0] = tension * p[0] + (1 - tension) * (x0 + t * dx);
+ p[1] = tension * p[1] + (1 - tension) * (y0 + t * dy);
+ }
+ return d3_svg_lineBasis(points);
+}
+
+// Returns the dot product of the given four-element vectors.
+function d3_svg_lineDot4(a, b) {
+ return a[0] * b[0] + a[1] * b[1] + a[2] * b[2] + a[3] * b[3];
+}
+
+// Matrix to transform basis (b-spline) control points to bezier
+// control points. Derived from FvD 11.2.8.
+var d3_svg_lineBasisBezier1 = [0, 2/3, 1/3, 0],
+ d3_svg_lineBasisBezier2 = [0, 1/3, 2/3, 0],
+ d3_svg_lineBasisBezier3 = [0, 1/6, 2/3, 1/6];
+
+// Pushes a "C" Bézier curve onto the specified path array, given the
+// two specified four-element arrays which define the control points.
+function d3_svg_lineBasisBezier(path, x, y) {
+ path.push(
+ "C", d3_svg_lineDot4(d3_svg_lineBasisBezier1, x),
+ ",", d3_svg_lineDot4(d3_svg_lineBasisBezier1, y),
+ ",", d3_svg_lineDot4(d3_svg_lineBasisBezier2, x),
+ ",", d3_svg_lineDot4(d3_svg_lineBasisBezier2, y),
+ ",", d3_svg_lineDot4(d3_svg_lineBasisBezier3, x),
+ ",", d3_svg_lineDot4(d3_svg_lineBasisBezier3, y));
+}
+
+// Computes the slope from points p0 to p1.
+function d3_svg_lineSlope(p0, p1) {
+ return (p1[1] - p0[1]) / (p1[0] - p0[0]);
+}
+
+// Compute three-point differences for the given points.
+// http://en.wikipedia.org/wiki/Cubic_Hermite_spline#Finite_difference
+function d3_svg_lineFiniteDifferences(points) {
+ var i = 0,
+ j = points.length - 1,
+ m = [],
+ p0 = points[0],
+ p1 = points[1],
+ d = m[0] = d3_svg_lineSlope(p0, p1);
+ while (++i < j) {
+ m[i] = d + (d = d3_svg_lineSlope(p0 = p1, p1 = points[i + 1]));
+ }
+ m[i] = d;
+ return m;
+}
+
+// Interpolates the given points using Fritsch-Carlson Monotone cubic Hermite
+// interpolation. Returns an array of tangent vectors. For details, see
+// http://en.wikipedia.org/wiki/Monotone_cubic_interpolation
+function d3_svg_lineMonotoneTangents(points) {
+ var tangents = [],
+ d,
+ a,
+ b,
+ s,
+ m = d3_svg_lineFiniteDifferences(points),
+ i = -1,
+ j = points.length - 1;
+
+ // The first two steps are done by computing finite-differences:
+ // 1. Compute the slopes of the secant lines between successive points.
+ // 2. Initialize the tangents at every point as the average of the secants.
+
+ // Then, for each segment…
+ while (++i < j) {
+ d = d3_svg_lineSlope(points[i], points[i + 1]);
+
+ // 3. If two successive values y[k] and y[k + 1] are equal (i.e., d is
+ // zero), then set m[k] = m[k + 1] = 0, as the spline connecting these
+ // points must be flat to preserve monotonicity. Skip steps 4 and 5 for
+ // those k.
+
+ if (Math.abs(d) < 1e-6) {
+ m[i] = m[i + 1] = 0;
+ } else {
+ // 4. Let a[k] = m[k] / d[k] and b[k] = m[k + 1] / d[k].
+ a = m[i] / d;
+ b = m[i + 1] / d;
+
+ // 5. Prevent overshoot and ensure monotonicity by restricting the
+ // magnitude of the vector <a[k], b[k]> to a circle of radius 3.
+ s = a * a + b * b;
+ if (s > 9) {
+ s = d * 3 / Math.sqrt(s);
+ m[i] = s * a;
+ m[i + 1] = s * b;
+ }
+ }
+ }
+
+ // Compute the normalized tangent vector from the slopes. Note that if x is
+ // not monotonic, it's possible that the slope will be infinite, so we protect
+ // against NaN by setting the coordinate to zero.
+ i = -1; while (++i <= j) {
+ s = (points[Math.min(j, i + 1)][0] - points[Math.max(0, i - 1)][0])
+ / (6 * (1 + m[i] * m[i]));
+ tangents.push([s || 0, m[i] * s || 0]);
+ }
+
+ return tangents;
+}
+
+function d3_svg_lineMonotone(points) {
+ return points.length < 3
+ ? d3_svg_lineLinear(points)
+ : points[0] +
+ d3_svg_lineHermite(points, d3_svg_lineMonotoneTangents(points));
+}
+d3.svg.line.radial = function() {
+ var line = d3_svg_line(d3_svg_lineRadial);
+ line.radius = line.x, delete line.x;
+ line.angle = line.y, delete line.y;
+ return line;
+};
+
+function d3_svg_lineRadial(points) {
+ var point,
+ i = -1,
+ n = points.length,
+ r,
+ a;
+ while (++i < n) {
+ point = points[i];
+ r = point[0];
+ a = point[1] + d3_svg_arcOffset;
+ point[0] = r * Math.cos(a);
+ point[1] = r * Math.sin(a);
+ }
+ return points;
+}
+function d3_svg_area(projection) {
+ var x0 = d3_svg_lineX,
+ x1 = d3_svg_lineX,
+ y0 = 0,
+ y1 = d3_svg_lineY,
+ interpolate,
+ i0,
+ i1,
+ tension = .7;
+
+ function area(d) {
+ if (d.length < 1) return null;
+ var points0 = d3_svg_linePoints(this, d, x0, y0),
+ points1 = d3_svg_linePoints(this, d, x0 === x1 ? d3_svg_areaX(points0) : x1, y0 === y1 ? d3_svg_areaY(points0) : y1);
+ return "M" + i0(projection(points1), tension)
+ + "L" + i1(projection(points0.reverse()), tension)
+ + "Z";
+ }
+
+ area.x = function(x) {
+ if (!arguments.length) return x1;
+ x0 = x1 = x;
+ return area;
+ };
+
+ area.x0 = function(x) {
+ if (!arguments.length) return x0;
+ x0 = x;
+ return area;
+ };
+
+ area.x1 = function(x) {
+ if (!arguments.length) return x1;
+ x1 = x;
+ return area;
+ };
+
+ area.y = function(y) {
+ if (!arguments.length) return y1;
+ y0 = y1 = y;
+ return area;
+ };
+
+ area.y0 = function(y) {
+ if (!arguments.length) return y0;
+ y0 = y;
+ return area;
+ };
+
+ area.y1 = function(y) {
+ if (!arguments.length) return y1;
+ y1 = y;
+ return area;
+ };
+
+ area.interpolate = function(x) {
+ if (!arguments.length) return interpolate;
+ i0 = d3_svg_lineInterpolators[interpolate = x];
+ i1 = i0.reverse || i0;
+ return area;
+ };
+
+ area.tension = function(x) {
+ if (!arguments.length) return tension;
+ tension = x;
+ return area;
+ };
+
+ return area.interpolate("linear");
+}
+
+d3_svg_lineStepBefore.reverse = d3_svg_lineStepAfter;
+d3_svg_lineStepAfter.reverse = d3_svg_lineStepBefore;
+
+d3.svg.area = function() {
+ return d3_svg_area(Object);
+};
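+
+// Illustrative usage sketch (editor's addition, not part of the library):
+// an area is the closed region between two lines sharing x; y0 is commonly
+// a constant baseline.
+//   var area = d3.svg.area()
+//       .x(function(d) { return d.x; })
+//       .y0(300)
+//       .y1(function(d) { return d.y; });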
+
+function d3_svg_areaX(points) {
+ return function(d, i) {
+ return points[i][0];
+ };
+}
+
+function d3_svg_areaY(points) {
+ return function(d, i) {
+ return points[i][1];
+ };
+}
+d3.svg.area.radial = function() {
+ var area = d3_svg_area(d3_svg_lineRadial);
+ area.radius = area.x, delete area.x;
+ area.innerRadius = area.x0, delete area.x0;
+ area.outerRadius = area.x1, delete area.x1;
+ area.angle = area.y, delete area.y;
+ area.startAngle = area.y0, delete area.y0;
+ area.endAngle = area.y1, delete area.y1;
+ return area;
+};
+d3.svg.chord = function() {
+ var source = d3_svg_chordSource,
+ target = d3_svg_chordTarget,
+ radius = d3_svg_chordRadius,
+ startAngle = d3_svg_arcStartAngle,
+ endAngle = d3_svg_arcEndAngle;
+
+ // TODO Allow control point to be customized.
+
+ function chord(d, i) {
+ var s = subgroup(this, source, d, i),
+ t = subgroup(this, target, d, i);
+ return "M" + s.p0
+ + arc(s.r, s.p1) + (equals(s, t)
+ ? curve(s.r, s.p1, s.r, s.p0)
+ : curve(s.r, s.p1, t.r, t.p0)
+ + arc(t.r, t.p1)
+ + curve(t.r, t.p1, s.r, s.p0))
+ + "Z";
+ }
+
+ function subgroup(self, f, d, i) {
+ var subgroup = f.call(self, d, i),
+ r = radius.call(self, subgroup, i),
+ a0 = startAngle.call(self, subgroup, i) + d3_svg_arcOffset,
+ a1 = endAngle.call(self, subgroup, i) + d3_svg_arcOffset;
+ return {
+ r: r,
+ a0: a0,
+ a1: a1,
+ p0: [r * Math.cos(a0), r * Math.sin(a0)],
+ p1: [r * Math.cos(a1), r * Math.sin(a1)]
+ };
+ }
+
+ function equals(a, b) {
+ return a.a0 == b.a0 && a.a1 == b.a1;
+ }
+
+ function arc(r, p) {
+ return "A" + r + "," + r + " 0 0,1 " + p;
+ }
+
+ function curve(r0, p0, r1, p1) {
+ return "Q 0,0 " + p1;
+ }
+
+ chord.radius = function(v) {
+ if (!arguments.length) return radius;
+ radius = d3.functor(v);
+ return chord;
+ };
+
+ chord.source = function(v) {
+ if (!arguments.length) return source;
+ source = d3.functor(v);
+ return chord;
+ };
+
+ chord.target = function(v) {
+ if (!arguments.length) return target;
+ target = d3.functor(v);
+ return chord;
+ };
+
+ chord.startAngle = function(v) {
+ if (!arguments.length) return startAngle;
+ startAngle = d3.functor(v);
+ return chord;
+ };
+
+ chord.endAngle = function(v) {
+ if (!arguments.length) return endAngle;
+ endAngle = d3.functor(v);
+ return chord;
+ };
+
+ return chord;
+};
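+
+// Illustrative usage sketch (editor's addition, not part of the library):
+// each datum supplies source and target arcs, each with a radius and
+// start/end angles in radians.
+//   var chord = d3.svg.chord();
+//   chord({
+//     source: {radius: 100, startAngle: 0, endAngle: 1},
+//     target: {radius: 100, startAngle: 2, endAngle: 3}
+//   });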
+
+function d3_svg_chordSource(d) {
+ return d.source;
+}
+
+function d3_svg_chordTarget(d) {
+ return d.target;
+}
+
+function d3_svg_chordRadius(d) {
+ return d.radius;
+}
+
+function d3_svg_chordStartAngle(d) {
+ return d.startAngle;
+}
+
+function d3_svg_chordEndAngle(d) {
+ return d.endAngle;
+}
+d3.svg.diagonal = function() {
+ var source = d3_svg_chordSource,
+ target = d3_svg_chordTarget,
+ projection = d3_svg_diagonalProjection;
+
+ function diagonal(d, i) {
+ var p0 = source.call(this, d, i),
+ p3 = target.call(this, d, i),
+ m = (p0.y + p3.y) / 2,
+ p = [p0, {x: p0.x, y: m}, {x: p3.x, y: m}, p3];
+ p = p.map(projection);
+ return "M" + p[0] + "C" + p[1] + " " + p[2] + " " + p[3];
+ }
+
+ diagonal.source = function(x) {
+ if (!arguments.length) return source;
+ source = d3.functor(x);
+ return diagonal;
+ };
+
+ diagonal.target = function(x) {
+ if (!arguments.length) return target;
+ target = d3.functor(x);
+ return diagonal;
+ };
+
+ diagonal.projection = function(x) {
+ if (!arguments.length) return projection;
+ projection = x;
+ return diagonal;
+ };
+
+ return diagonal;
+};
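+
+// Illustrative usage sketch (editor's addition, not part of the library):
+// a diagonal is a cubic Bézier between {x, y} endpoints that bends at the
+// vertical midpoint.
+//   var diagonal = d3.svg.diagonal();
+//   diagonal({source: {x: 0, y: 0}, target: {x: 100, y: 100}});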
+
+function d3_svg_diagonalProjection(d) {
+ return [d.x, d.y];
+}
+d3.svg.diagonal.radial = function() {
+ var diagonal = d3.svg.diagonal(),
+ projection = d3_svg_diagonalProjection,
+ projection_ = diagonal.projection;
+
+ diagonal.projection = function(x) {
+ return arguments.length
+ ? projection_(d3_svg_diagonalRadialProjection(projection = x))
+ : projection;
+ };
+
+ return diagonal;
+};
+
+function d3_svg_diagonalRadialProjection(projection) {
+ return function() {
+ var d = projection.apply(this, arguments),
+ r = d[0],
+ a = d[1] + d3_svg_arcOffset;
+ return [r * Math.cos(a), r * Math.sin(a)];
+ };
+}
+d3.svg.mouse = function(container) {
+ return d3_svg_mousePoint(container, d3.event);
+};
+
+// https://bugs.webkit.org/show_bug.cgi?id=44083
+var d3_mouse_bug44083 = /WebKit/.test(navigator.userAgent) ? -1 : 0;
+
+function d3_svg_mousePoint(container, e) {
+ var point = (container.ownerSVGElement || container).createSVGPoint();
+ if ((d3_mouse_bug44083 < 0) && (window.scrollX || window.scrollY)) {
+ var svg = d3.select(document.body)
+ .append("svg:svg")
+ .style("position", "absolute")
+ .style("top", 0)
+ .style("left", 0);
+ var ctm = svg[0][0].getScreenCTM();
+ d3_mouse_bug44083 = !(ctm.f || ctm.e);
+ svg.remove();
+ }
+ if (d3_mouse_bug44083) {
+ point.x = e.pageX;
+ point.y = e.pageY;
+ } else {
+ point.x = e.clientX;
+ point.y = e.clientY;
+ }
+ point = point.matrixTransform(container.getScreenCTM().inverse());
+ return [point.x, point.y];
+}
+d3.svg.touches = function(container) {
+ var touches = d3.event.touches;
+ return touches ? d3_array(touches).map(function(touch) {
+ var point = d3_svg_mousePoint(container, touch);
+ point.identifier = touch.identifier;
+ return point;
+ }) : [];
+};
+d3.svg.symbol = function() {
+ var type = d3_svg_symbolType,
+ size = d3_svg_symbolSize;
+
+ function symbol(d, i) {
+ return (d3_svg_symbols[type.call(this, d, i)]
+ || d3_svg_symbols.circle)
+ (size.call(this, d, i));
+ }
+
+ symbol.type = function(x) {
+ if (!arguments.length) return type;
+ type = d3.functor(x);
+ return symbol;
+ };
+
+ // size of symbol in square pixels
+ symbol.size = function(x) {
+ if (!arguments.length) return size;
+ size = d3.functor(x);
+ return symbol;
+ };
+
+ return symbol;
+};
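+
+// Example (sketch): a path string for a cross of 256 square pixels; append
+// it via something like g.append("svg:path").attr("d", path).
+//
+//   var path = d3.svg.symbol().type("cross").size(256)();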
+
+function d3_svg_symbolSize() {
+ return 64;
+}
+
+function d3_svg_symbolType() {
+ return "circle";
+}
+
+// TODO cross-diagonal?
+var d3_svg_symbols = {
+ "circle": function(size) {
+ var r = Math.sqrt(size / Math.PI);
+ return "M0," + r
+ + "A" + r + "," + r + " 0 1,1 0," + (-r)
+ + "A" + r + "," + r + " 0 1,1 0," + r
+ + "Z";
+ },
+ "cross": function(size) {
+ var r = Math.sqrt(size / 5) / 2;
+ return "M" + -3 * r + "," + -r
+ + "H" + -r
+ + "V" + -3 * r
+ + "H" + r
+ + "V" + -r
+ + "H" + 3 * r
+ + "V" + r
+ + "H" + r
+ + "V" + 3 * r
+ + "H" + -r
+ + "V" + r
+ + "H" + -3 * r
+ + "Z";
+ },
+ "diamond": function(size) {
+ var ry = Math.sqrt(size / (2 * d3_svg_symbolTan30)),
+ rx = ry * d3_svg_symbolTan30;
+ return "M0," + -ry
+ + "L" + rx + ",0"
+ + " 0," + ry
+ + " " + -rx + ",0"
+ + "Z";
+ },
+ "square": function(size) {
+ var r = Math.sqrt(size) / 2;
+ return "M" + -r + "," + -r
+ + "L" + r + "," + -r
+ + " " + r + "," + r
+ + " " + -r + "," + r
+ + "Z";
+ },
+ "triangle-down": function(size) {
+ var rx = Math.sqrt(size / d3_svg_symbolSqrt3),
+ ry = rx * d3_svg_symbolSqrt3 / 2;
+ return "M0," + ry
+        + "L" + rx + "," + -ry
+ + " " + -rx + "," + -ry
+ + "Z";
+ },
+ "triangle-up": function(size) {
+ var rx = Math.sqrt(size / d3_svg_symbolSqrt3),
+ ry = rx * d3_svg_symbolSqrt3 / 2;
+ return "M0," + -ry
+        + "L" + rx + "," + ry
+ + " " + -rx + "," + ry
+ + "Z";
+ }
+};
+
+d3.svg.symbolTypes = d3.keys(d3_svg_symbols);
+
+var d3_svg_symbolSqrt3 = Math.sqrt(3),
+ d3_svg_symbolTan30 = Math.tan(30 * Math.PI / 180);
+d3.svg.axis = function() {
+ var scale = d3.scale.linear(),
+ orient = "bottom",
+ tickMajorSize = 6,
+ tickMinorSize = 6,
+ tickEndSize = 6,
+ tickPadding = 3,
+ tickArguments_ = [10],
+ tickFormat_,
+ tickSubdivide = 0;
+
+ function axis(selection) {
+ selection.each(function(d, i, j) {
+ var g = d3.select(this);
+
+ // If selection is a transition, create subtransitions.
+ var transition = selection.delay ? function(o) {
+ var id = d3_transitionInheritId;
+ try {
+ d3_transitionInheritId = selection.id;
+ return o.transition()
+ .delay(selection[j][i].delay)
+ .duration(selection[j][i].duration)
+ .ease(selection.ease());
+ } finally {
+ d3_transitionInheritId = id;
+ }
+ } : Object;
+
+ // Ticks.
+ var ticks = scale.ticks.apply(scale, tickArguments_),
+ tickFormat = tickFormat_ == null ? scale.tickFormat.apply(scale, tickArguments_) : tickFormat_;
+
+ // Minor ticks.
+ var subticks = d3_svg_axisSubdivide(scale, ticks, tickSubdivide),
+ subtick = g.selectAll(".minor").data(subticks, String),
+ subtickEnter = subtick.enter().insert("svg:line", "g").attr("class", "tick minor").style("opacity", 1e-6),
+ subtickExit = transition(subtick.exit()).style("opacity", 1e-6).remove(),
+ subtickUpdate = transition(subtick).style("opacity", 1);
+
+ // Major ticks.
+ var tick = g.selectAll("g").data(ticks, String),
+ tickEnter = tick.enter().insert("svg:g", "path").style("opacity", 1e-6),
+ tickExit = transition(tick.exit()).style("opacity", 1e-6).remove(),
+ tickUpdate = transition(tick).style("opacity", 1),
+ tickTransform;
+
+ // Domain.
+ var range = d3_scaleExtent(scale.range()),
+ path = g.selectAll(".domain").data([0]),
+ pathEnter = path.enter().append("svg:path").attr("class", "domain"),
+ pathUpdate = transition(path);
+
+ // Stash the new scale and grab the old scale.
+ var scale0 = this.__chart__ || scale;
+ this.__chart__ = scale.copy();
+
+ tickEnter.append("svg:line").attr("class", "tick");
+ tickEnter.append("svg:text");
+ tickUpdate.select("text").text(tickFormat);
+
+ switch (orient) {
+ case "bottom": {
+ tickTransform = d3_svg_axisX;
+ subtickUpdate.attr("x2", 0).attr("y2", tickMinorSize);
+ tickUpdate.select("line").attr("x2", 0).attr("y2", tickMajorSize);
+ tickUpdate.select("text").attr("x", 0).attr("y", Math.max(tickMajorSize, 0) + tickPadding).attr("dy", ".71em").attr("text-anchor", "middle");
+ pathUpdate.attr("d", "M" + range[0] + "," + tickEndSize + "V0H" + range[1] + "V" + tickEndSize);
+ break;
+ }
+ case "top": {
+ tickTransform = d3_svg_axisX;
+ subtickUpdate.attr("x2", 0).attr("y2", -tickMinorSize);
+ tickUpdate.select("line").attr("x2", 0).attr("y2", -tickMajorSize);
+ tickUpdate.select("text").attr("x", 0).attr("y", -(Math.max(tickMajorSize, 0) + tickPadding)).attr("dy", "0em").attr("text-anchor", "middle");
+ pathUpdate.attr("d", "M" + range[0] + "," + -tickEndSize + "V0H" + range[1] + "V" + -tickEndSize);
+ break;
+ }
+ case "left": {
+ tickTransform = d3_svg_axisY;
+ subtickUpdate.attr("x2", -tickMinorSize).attr("y2", 0);
+ tickUpdate.select("line").attr("x2", -tickMajorSize).attr("y2", 0);
+ tickUpdate.select("text").attr("x", -(Math.max(tickMajorSize, 0) + tickPadding)).attr("y", 0).attr("dy", ".32em").attr("text-anchor", "end");
+ pathUpdate.attr("d", "M" + -tickEndSize + "," + range[0] + "H0V" + range[1] + "H" + -tickEndSize);
+ break;
+ }
+ case "right": {
+ tickTransform = d3_svg_axisY;
+ subtickUpdate.attr("x2", tickMinorSize).attr("y2", 0);
+ tickUpdate.select("line").attr("x2", tickMajorSize).attr("y2", 0);
+ tickUpdate.select("text").attr("x", Math.max(tickMajorSize, 0) + tickPadding).attr("y", 0).attr("dy", ".32em").attr("text-anchor", "start");
+ pathUpdate.attr("d", "M" + tickEndSize + "," + range[0] + "H0V" + range[1] + "H" + tickEndSize);
+ break;
+ }
+ }
+
+ tickEnter.call(tickTransform, scale0);
+ tickUpdate.call(tickTransform, scale);
+ tickExit.call(tickTransform, scale);
+
+ subtickEnter.call(tickTransform, scale0);
+ subtickUpdate.call(tickTransform, scale);
+ subtickExit.call(tickTransform, scale);
+ });
+ }
+
+ axis.scale = function(x) {
+ if (!arguments.length) return scale;
+ scale = x;
+ return axis;
+ };
+
+ axis.orient = function(x) {
+ if (!arguments.length) return orient;
+ orient = x;
+ return axis;
+ };
+
+ axis.ticks = function() {
+ if (!arguments.length) return tickArguments_;
+ tickArguments_ = arguments;
+ return axis;
+ };
+
+ axis.tickFormat = function(x) {
+ if (!arguments.length) return tickFormat_;
+ tickFormat_ = x;
+ return axis;
+ };
+
+ axis.tickSize = function(x, y, z) {
+ if (!arguments.length) return tickMajorSize;
+ var n = arguments.length - 1;
+ tickMajorSize = +x;
+ tickMinorSize = n > 1 ? +y : tickMajorSize;
+ tickEndSize = n > 0 ? +arguments[n] : tickMajorSize;
+ return axis;
+ };
+
+ axis.tickPadding = function(x) {
+ if (!arguments.length) return tickPadding;
+ tickPadding = +x;
+ return axis;
+ };
+
+ axis.tickSubdivide = function(x) {
+ if (!arguments.length) return tickSubdivide;
+ tickSubdivide = +x;
+ return axis;
+ };
+
+ return axis;
+};
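+
+// Example (sketch; `svg` is an assumed selection): rendering a
+// bottom-oriented axis into a translated group.
+//
+//   var axis = d3.svg.axis().scale(d3.scale.linear().range([0, 500]));
+//   svg.append("svg:g").attr("transform", "translate(0,400)").call(axis);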
+
+function d3_svg_axisX(selection, x) {
+ selection.attr("transform", function(d) { return "translate(" + x(d) + ",0)"; });
+}
+
+function d3_svg_axisY(selection, y) {
+ selection.attr("transform", function(d) { return "translate(0," + y(d) + ")"; });
+}
+
+function d3_svg_axisSubdivide(scale, ticks, m) {
+  var subticks = [];
+ if (m && ticks.length > 1) {
+ var extent = d3_scaleExtent(scale.domain()),
+ i = -1,
+ n = ticks.length,
+ d = (ticks[1] - ticks[0]) / ++m,
+ j,
+ v;
+ while (++i < n) {
+ for (j = m; --j > 0;) {
+ if ((v = +ticks[i] - j * d) >= extent[0]) {
+ subticks.push(v);
+ }
+ }
+ }
+ for (--i, j = 0; ++j < m && (v = +ticks[i] + j * d) < extent[1];) {
+ subticks.push(v);
+ }
+ }
+ return subticks;
+}
+d3.behavior = {};
+d3.behavior.drag = function() {
+ var event = d3.dispatch("drag", "dragstart", "dragend");
+
+ function drag() {
+ this
+ .on("mousedown.drag", mousedown)
+ .on("touchstart.drag", mousedown);
+
+ d3.select(window)
+ .on("mousemove.drag", d3_behavior_dragMove)
+ .on("touchmove.drag", d3_behavior_dragMove)
+ .on("mouseup.drag", d3_behavior_dragUp, true)
+ .on("touchend.drag", d3_behavior_dragUp, true)
+ .on("click.drag", d3_behavior_dragClick, true);
+ }
+
+ // snapshot the local context for subsequent dispatch
+ function start() {
+ d3_behavior_dragEvent = event;
+ d3_behavior_dragEventTarget = d3.event.target;
+ d3_behavior_dragOffset = d3_behavior_dragPoint((d3_behavior_dragTarget = this).parentNode);
+ d3_behavior_dragMoved = 0;
+ d3_behavior_dragArguments = arguments;
+ }
+
+ function mousedown() {
+ start.apply(this, arguments);
+ d3_behavior_dragDispatch("dragstart");
+ }
+
+ drag.on = function(type, listener) {
+ event[type].add(listener);
+ return drag;
+ };
+
+ return drag;
+};
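+
+// Example (sketch; `circles` is an assumed selection): during dispatch,
+// d3.event carries dx/dy deltas since the previous drag event.
+//
+//   circles.call(d3.behavior.drag().on("drag", function() {
+//     // use d3.event.dx and d3.event.dy to reposition the element
+//   }));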
+
+var d3_behavior_dragEvent,
+ d3_behavior_dragEventTarget,
+ d3_behavior_dragTarget,
+ d3_behavior_dragArguments,
+ d3_behavior_dragOffset,
+ d3_behavior_dragMoved,
+ d3_behavior_dragStopClick;
+
+function d3_behavior_dragDispatch(type) {
+ var o = d3.event, p = d3_behavior_dragTarget.parentNode, dx = 0, dy = 0;
+
+ if (p) {
+ p = d3_behavior_dragPoint(p);
+ dx = p[0] - d3_behavior_dragOffset[0];
+ dy = p[1] - d3_behavior_dragOffset[1];
+ d3_behavior_dragOffset = p;
+ d3_behavior_dragMoved |= dx | dy;
+ }
+
+ try {
+ d3.event = {dx: dx, dy: dy};
+ d3_behavior_dragEvent[type].dispatch.apply(d3_behavior_dragTarget, d3_behavior_dragArguments);
+ } finally {
+ d3.event = o;
+ }
+
+ o.preventDefault();
+}
+
+function d3_behavior_dragPoint(container) {
+ return d3.event.touches
+ ? d3.svg.touches(container)[0]
+ : d3.svg.mouse(container);
+}
+
+function d3_behavior_dragMove() {
+ if (!d3_behavior_dragTarget) return;
+ var parent = d3_behavior_dragTarget.parentNode;
+
+ // O NOES! The drag element was removed from the DOM.
+ if (!parent) return d3_behavior_dragUp();
+
+ d3_behavior_dragDispatch("drag");
+ d3_behavior_dragCancel();
+}
+
+function d3_behavior_dragUp() {
+ if (!d3_behavior_dragTarget) return;
+ d3_behavior_dragDispatch("dragend");
+ d3_behavior_dragTarget = null;
+
+ // If the node was moved, prevent the mouseup from propagating.
+ // Also prevent the subsequent click from propagating (e.g., for anchors).
+ if (d3_behavior_dragMoved && d3_behavior_dragEventTarget === d3.event.target) {
+ d3_behavior_dragStopClick = true;
+ d3_behavior_dragCancel();
+ }
+}
+
+function d3_behavior_dragClick() {
+ if (d3_behavior_dragStopClick && d3_behavior_dragEventTarget === d3.event.target) {
+ d3_behavior_dragCancel();
+ d3_behavior_dragStopClick = false;
+ d3_behavior_dragEventTarget = null;
+ }
+}
+
+function d3_behavior_dragCancel() {
+ d3.event.stopPropagation();
+ d3.event.preventDefault();
+}
+// TODO unbind zoom behavior?
+// TODO unbind listener?
+d3.behavior.zoom = function() {
+ var xyz = [0, 0, 0],
+ event = d3.dispatch("zoom");
+
+ function zoom() {
+ this
+ .on("mousedown.zoom", mousedown)
+ .on("mousewheel.zoom", mousewheel)
+ .on("DOMMouseScroll.zoom", mousewheel)
+ .on("dblclick.zoom", dblclick)
+ .on("touchstart.zoom", touchstart);
+
+ d3.select(window)
+ .on("mousemove.zoom", d3_behavior_zoomMousemove)
+ .on("mouseup.zoom", d3_behavior_zoomMouseup)
+ .on("touchmove.zoom", d3_behavior_zoomTouchmove)
+ .on("touchend.zoom", d3_behavior_zoomTouchup)
+ .on("click.zoom", d3_behavior_zoomClick, true);
+ }
+
+ // snapshot the local context for subsequent dispatch
+ function start() {
+ d3_behavior_zoomXyz = xyz;
+ d3_behavior_zoomDispatch = event.zoom.dispatch;
+ d3_behavior_zoomEventTarget = d3.event.target;
+ d3_behavior_zoomTarget = this;
+ d3_behavior_zoomArguments = arguments;
+ }
+
+ function mousedown() {
+ start.apply(this, arguments);
+ d3_behavior_zoomPanning = d3_behavior_zoomLocation(d3.svg.mouse(d3_behavior_zoomTarget));
+ d3_behavior_zoomMoved = false;
+ d3.event.preventDefault();
+ window.focus();
+ }
+
+ // store starting mouse location
+ function mousewheel() {
+ start.apply(this, arguments);
+ if (!d3_behavior_zoomZooming) d3_behavior_zoomZooming = d3_behavior_zoomLocation(d3.svg.mouse(d3_behavior_zoomTarget));
+ d3_behavior_zoomTo(d3_behavior_zoomDelta() + xyz[2], d3.svg.mouse(d3_behavior_zoomTarget), d3_behavior_zoomZooming);
+ }
+
+ function dblclick() {
+ start.apply(this, arguments);
+ var mouse = d3.svg.mouse(d3_behavior_zoomTarget);
+ d3_behavior_zoomTo(d3.event.shiftKey ? Math.ceil(xyz[2] - 1) : Math.floor(xyz[2] + 1), mouse, d3_behavior_zoomLocation(mouse));
+ }
+
+  // double-tap detection
+ function touchstart() {
+ start.apply(this, arguments);
+ var touches = d3_behavior_zoomTouchup(),
+ touch,
+ now = Date.now();
+ if ((touches.length === 1) && (now - d3_behavior_zoomLast < 300)) {
+ d3_behavior_zoomTo(1 + Math.floor(xyz[2]), touch = touches[0], d3_behavior_zoomLocations[touch.identifier]);
+ }
+ d3_behavior_zoomLast = now;
+ }
+
+ zoom.on = function(type, listener) {
+ event[type].add(listener);
+ return zoom;
+ };
+
+ return zoom;
+};
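+
+// Example (sketch): during dispatch, d3.event exposes scale, translate and
+// a transform helper for rescaling the attached scales.
+//
+//   d3.select("svg").call(d3.behavior.zoom().on("zoom", function() {
+//     // redraw using d3.event.scale and d3.event.translate
+//   }));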
+
+var d3_behavior_zoomDiv,
+ d3_behavior_zoomPanning,
+ d3_behavior_zoomZooming,
+ d3_behavior_zoomLocations = {}, // identifier -> location
+ d3_behavior_zoomLast = 0,
+ d3_behavior_zoomXyz,
+ d3_behavior_zoomDispatch,
+ d3_behavior_zoomEventTarget,
+ d3_behavior_zoomTarget,
+ d3_behavior_zoomArguments,
+ d3_behavior_zoomMoved,
+ d3_behavior_zoomStopClick;
+
+function d3_behavior_zoomLocation(point) {
+ return [
+ point[0] - d3_behavior_zoomXyz[0],
+ point[1] - d3_behavior_zoomXyz[1],
+ d3_behavior_zoomXyz[2]
+ ];
+}
+
+// detect the pixels that would be scrolled by this wheel event
+function d3_behavior_zoomDelta() {
+
+ // mousewheel events are totally broken!
+ // https://bugs.webkit.org/show_bug.cgi?id=40441
+  // not only that, but Chrome and Safari differ with respect to acceleration!
+ if (!d3_behavior_zoomDiv) {
+ d3_behavior_zoomDiv = d3.select("body").append("div")
+ .style("visibility", "hidden")
+ .style("top", 0)
+ .style("height", 0)
+ .style("width", 0)
+ .style("overflow-y", "scroll")
+ .append("div")
+ .style("height", "2000px")
+ .node().parentNode;
+ }
+
+ var e = d3.event, delta;
+ try {
+ d3_behavior_zoomDiv.scrollTop = 1000;
+ d3_behavior_zoomDiv.dispatchEvent(e);
+ delta = 1000 - d3_behavior_zoomDiv.scrollTop;
+ } catch (error) {
+ delta = e.wheelDelta || (-e.detail * 5);
+ }
+
+ return delta * .005;
+}
+
+// Note: Since we don't rotate, it's possible for the touches to become
+// slightly detached from their original positions. Thus, we recompute the
+// touch points on touchend as well as touchstart!
+function d3_behavior_zoomTouchup() {
+ var touches = d3.svg.touches(d3_behavior_zoomTarget),
+ i = -1,
+ n = touches.length,
+ touch;
+ while (++i < n) d3_behavior_zoomLocations[(touch = touches[i]).identifier] = d3_behavior_zoomLocation(touch);
+ return touches;
+}
+
+function d3_behavior_zoomTouchmove() {
+ var touches = d3.svg.touches(d3_behavior_zoomTarget);
+ switch (touches.length) {
+
+ // single-touch pan
+ case 1: {
+ var touch = touches[0];
+ d3_behavior_zoomTo(d3_behavior_zoomXyz[2], touch, d3_behavior_zoomLocations[touch.identifier]);
+ break;
+ }
+
+ // double-touch pan + zoom
+ case 2: {
+ var p0 = touches[0],
+ p1 = touches[1],
+ p2 = [(p0[0] + p1[0]) / 2, (p0[1] + p1[1]) / 2],
+ l0 = d3_behavior_zoomLocations[p0.identifier],
+ l1 = d3_behavior_zoomLocations[p1.identifier],
+ l2 = [(l0[0] + l1[0]) / 2, (l0[1] + l1[1]) / 2, l0[2]];
+ d3_behavior_zoomTo(Math.log(d3.event.scale) / Math.LN2 + l0[2], p2, l2);
+ break;
+ }
+ }
+}
+
+function d3_behavior_zoomMousemove() {
+ d3_behavior_zoomZooming = null;
+ if (d3_behavior_zoomPanning) {
+ d3_behavior_zoomMoved = true;
+ d3_behavior_zoomTo(d3_behavior_zoomXyz[2], d3.svg.mouse(d3_behavior_zoomTarget), d3_behavior_zoomPanning);
+ }
+}
+
+function d3_behavior_zoomMouseup() {
+ if (d3_behavior_zoomPanning) {
+ if (d3_behavior_zoomMoved && d3_behavior_zoomEventTarget === d3.event.target) {
+ d3_behavior_zoomStopClick = true;
+ }
+ d3_behavior_zoomMousemove();
+ d3_behavior_zoomPanning = null;
+ }
+}
+
+function d3_behavior_zoomClick() {
+ if (d3_behavior_zoomStopClick && d3_behavior_zoomEventTarget === d3.event.target) {
+ d3.event.stopPropagation();
+ d3.event.preventDefault();
+ d3_behavior_zoomStopClick = false;
+ d3_behavior_zoomEventTarget = null;
+ }
+}
+
+function d3_behavior_zoomTo(z, x0, x1) {
+ var K = Math.pow(2, (d3_behavior_zoomXyz[2] = z) - x1[2]),
+ x = d3_behavior_zoomXyz[0] = x0[0] - K * x1[0],
+ y = d3_behavior_zoomXyz[1] = x0[1] - K * x1[1],
+ o = d3.event, // Events can be reentrant (e.g., focus).
+ k = Math.pow(2, z);
+
+ d3.event = {
+ scale: k,
+ translate: [x, y],
+ transform: function(sx, sy) {
+ if (sx) transform(sx, x);
+ if (sy) transform(sy, y);
+ }
+ };
+
+ function transform(scale, o) {
+ var domain = scale.__domain || (scale.__domain = scale.domain()),
+ range = scale.range().map(function(v) { return (v - o) / k; });
+ scale.domain(domain).domain(range.map(scale.invert));
+ }
+
+ try {
+ d3_behavior_zoomDispatch.apply(d3_behavior_zoomTarget, d3_behavior_zoomArguments);
+ } finally {
+ d3.event = o;
+ }
+
+ o.preventDefault();
+}
+})();
diff --git a/media/d3.layout.js b/media/d3.layout.js
new file mode 100644
index 00000000..2bfb9d32
--- /dev/null
+++ b/media/d3.layout.js
@@ -0,0 +1,1890 @@
+(function(){d3.layout = {};
+// Implements hierarchical edge bundling using Holten's algorithm. For each
+// input link, a path is computed that travels through the tree, up the parent
+// hierarchy to the least common ancestor, and then back down to the destination
+// node. Each path is simply an array of nodes.
+d3.layout.bundle = function() {
+ return function(links) {
+ var paths = [],
+ i = -1,
+ n = links.length;
+ while (++i < n) paths.push(d3_layout_bundlePath(links[i]));
+ return paths;
+ };
+};
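+
+// Example (sketch; leafA and leafB are assumed tree nodes exposing
+// `parent`): each input link yields the node path through the pair's
+// least common ancestor.
+//
+//   var paths = d3.layout.bundle()([{source: leafA, target: leafB}]);
+//   // paths[0] is [leafA, ..., lca, ..., leafB]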
+
+function d3_layout_bundlePath(link) {
+ var start = link.source,
+ end = link.target,
+ lca = d3_layout_bundleLeastCommonAncestor(start, end),
+ points = [start];
+ while (start !== lca) {
+ start = start.parent;
+ points.push(start);
+ }
+ var k = points.length;
+ while (end !== lca) {
+ points.splice(k, 0, end);
+ end = end.parent;
+ }
+ return points;
+}
+
+function d3_layout_bundleAncestors(node) {
+ var ancestors = [],
+ parent = node.parent;
+ while (parent != null) {
+ ancestors.push(node);
+ node = parent;
+ parent = parent.parent;
+ }
+ ancestors.push(node);
+ return ancestors;
+}
+
+function d3_layout_bundleLeastCommonAncestor(a, b) {
+ if (a === b) return a;
+ var aNodes = d3_layout_bundleAncestors(a),
+ bNodes = d3_layout_bundleAncestors(b),
+ aNode = aNodes.pop(),
+ bNode = bNodes.pop(),
+ sharedNode = null;
+ while (aNode === bNode) {
+ sharedNode = aNode;
+ aNode = aNodes.pop();
+ bNode = bNodes.pop();
+ }
+ return sharedNode;
+}
+d3.layout.chord = function() {
+ var chord = {},
+ chords,
+ groups,
+ matrix,
+ n,
+ padding = 0,
+ sortGroups,
+ sortSubgroups,
+ sortChords;
+
+ function relayout() {
+ var subgroups = {},
+ groupSums = [],
+ groupIndex = d3.range(n),
+ subgroupIndex = [],
+ k,
+ x,
+ x0,
+ i,
+ j;
+
+ chords = [];
+ groups = [];
+
+ // Compute the sum.
+ k = 0, i = -1; while (++i < n) {
+ x = 0, j = -1; while (++j < n) {
+ x += matrix[i][j];
+ }
+ groupSums.push(x);
+ subgroupIndex.push(d3.range(n));
+ k += x;
+ }
+
+ // Sort groups…
+ if (sortGroups) {
+ groupIndex.sort(function(a, b) {
+ return sortGroups(groupSums[a], groupSums[b]);
+ });
+ }
+
+ // Sort subgroups…
+ if (sortSubgroups) {
+ subgroupIndex.forEach(function(d, i) {
+ d.sort(function(a, b) {
+ return sortSubgroups(matrix[i][a], matrix[i][b]);
+ });
+ });
+ }
+
+    // Convert the sum to a scaling factor for [0, 2π].
+ // TODO Allow start and end angle to be specified.
+ // TODO Allow padding to be specified as percentage?
+ k = (2 * Math.PI - padding * n) / k;
+
+ // Compute the start and end angle for each group and subgroup.
+ x = 0, i = -1; while (++i < n) {
+ x0 = x, j = -1; while (++j < n) {
+ var di = groupIndex[i],
+ dj = subgroupIndex[i][j],
+ v = matrix[di][dj];
+ subgroups[di + "-" + dj] = {
+ index: di,
+ subindex: dj,
+ startAngle: x,
+ endAngle: x += v * k,
+ value: v
+ };
+ }
+ groups.push({
+ index: di,
+ startAngle: x0,
+ endAngle: x,
+ value: (x - x0) / k
+ });
+ x += padding;
+ }
+
+ // Generate chords for each (non-empty) subgroup-subgroup link.
+ i = -1; while (++i < n) {
+ j = i - 1; while (++j < n) {
+ var source = subgroups[i + "-" + j],
+ target = subgroups[j + "-" + i];
+ if (source.value || target.value) {
+ chords.push(source.value < target.value
+ ? {source: target, target: source}
+ : {source: source, target: target});
+ }
+ }
+ }
+
+ if (sortChords) resort();
+ }
+
+ function resort() {
+ chords.sort(function(a, b) {
+ return sortChords(a.target.value, b.target.value);
+ });
+ }
+
+ chord.matrix = function(x) {
+ if (!arguments.length) return matrix;
+ n = (matrix = x) && matrix.length;
+ chords = groups = null;
+ return chord;
+ };
+
+ chord.padding = function(x) {
+ if (!arguments.length) return padding;
+ padding = x;
+ chords = groups = null;
+ return chord;
+ };
+
+ chord.sortGroups = function(x) {
+ if (!arguments.length) return sortGroups;
+ sortGroups = x;
+ chords = groups = null;
+ return chord;
+ };
+
+ chord.sortSubgroups = function(x) {
+ if (!arguments.length) return sortSubgroups;
+ sortSubgroups = x;
+ chords = null;
+ return chord;
+ };
+
+ chord.sortChords = function(x) {
+ if (!arguments.length) return sortChords;
+ sortChords = x;
+ if (chords) resort();
+ return chord;
+ };
+
+ chord.chords = function() {
+ if (!chords) relayout();
+ return chords;
+ };
+
+ chord.groups = function() {
+ if (!groups) relayout();
+ return groups;
+ };
+
+ return chord;
+};
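+
+// Example (sketch): chord groups and chords from a 2x2 flow matrix; all
+// angles are in radians around the circle.
+//
+//   var layout = d3.layout.chord().padding(.05).matrix([[0, 4], [4, 0]]);
+//   var groups = layout.groups(), chords = layout.chords();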
+// A rudimentary force layout using Gauss-Seidel relaxation.
+d3.layout.force = function() {
+ var force = {},
+ event = d3.dispatch("tick"),
+ size = [1, 1],
+ drag,
+ alpha,
+ friction = .9,
+ linkDistance = d3_layout_forceLinkDistance,
+ linkStrength = d3_layout_forceLinkStrength,
+ charge = -30,
+ gravity = .1,
+ theta = .8,
+ interval,
+ nodes = [],
+ links = [],
+ distances,
+ strengths,
+ charges;
+
+ function repulse(node) {
+ return function(quad, x1, y1, x2, y2) {
+ if (quad.point !== node) {
+ var dx = quad.cx - node.x,
+ dy = quad.cy - node.y,
+ dn = 1 / Math.sqrt(dx * dx + dy * dy);
+
+ /* Barnes-Hut criterion. */
+ if ((x2 - x1) * dn < theta) {
+ var k = quad.charge * dn * dn;
+ node.px -= dx * k;
+ node.py -= dy * k;
+ return true;
+ }
+
+ if (quad.point && isFinite(dn)) {
+ var k = quad.pointCharge * dn * dn;
+ node.px -= dx * k;
+ node.py -= dy * k;
+ }
+ }
+ return !quad.charge;
+ };
+ }
+
+ function tick() {
+ var n = nodes.length,
+ m = links.length,
+ q,
+ i, // current index
+ o, // current object
+ s, // current source
+ t, // current target
+ l, // current distance
+ k, // current force
+ x, // x-distance
+ y; // y-distance
+
+ // gauss-seidel relaxation for links
+ for (i = 0; i < m; ++i) {
+ o = links[i];
+ s = o.source;
+ t = o.target;
+ x = t.x - s.x;
+ y = t.y - s.y;
+ if (l = (x * x + y * y)) {
+ l = alpha * strengths[i] * ((l = Math.sqrt(l)) - distances[i]) / l;
+ x *= l;
+ y *= l;
+ t.x -= x * (k = s.weight / (t.weight + s.weight));
+ t.y -= y * k;
+ s.x += x * (k = 1 - k);
+ s.y += y * k;
+ }
+ }
+
+ // apply gravity forces
+ if (k = alpha * gravity) {
+ x = size[0] / 2;
+ y = size[1] / 2;
+      i = -1; while (++i < n) {
+ o = nodes[i];
+ o.x += (x - o.x) * k;
+ o.y += (y - o.y) * k;
+ }
+ }
+
+ // compute quadtree center of mass and apply charge forces
+ if (charge) {
+ d3_layout_forceAccumulate(q = d3.geom.quadtree(nodes), alpha, charges);
+ i = -1; while (++i < n) {
+ if (!(o = nodes[i]).fixed) {
+ q.visit(repulse(o));
+ }
+ }
+ }
+
+ // position verlet integration
+ i = -1; while (++i < n) {
+ o = nodes[i];
+ if (o.fixed) {
+ o.x = o.px;
+ o.y = o.py;
+ } else {
+ o.x -= (o.px - (o.px = o.x)) * friction;
+ o.y -= (o.py - (o.py = o.y)) * friction;
+ }
+ }
+
+ event.tick.dispatch({type: "tick", alpha: alpha});
+
+ // simulated annealing, basically
+ return (alpha *= .99) < .005;
+ }
+
+ force.on = function(type, listener) {
+ event[type].add(listener);
+ return force;
+ };
+
+ force.nodes = function(x) {
+ if (!arguments.length) return nodes;
+ nodes = x;
+ return force;
+ };
+
+ force.links = function(x) {
+ if (!arguments.length) return links;
+ links = x;
+ return force;
+ };
+
+ force.size = function(x) {
+ if (!arguments.length) return size;
+ size = x;
+ return force;
+ };
+
+ force.linkDistance = function(x) {
+ if (!arguments.length) return linkDistance;
+ linkDistance = d3.functor(x);
+ return force;
+ };
+
+ // For backwards-compatibility.
+ force.distance = force.linkDistance;
+
+ force.linkStrength = function(x) {
+ if (!arguments.length) return linkStrength;
+ linkStrength = d3.functor(x);
+ return force;
+ };
+
+ force.friction = function(x) {
+ if (!arguments.length) return friction;
+ friction = x;
+ return force;
+ };
+
+ force.charge = function(x) {
+ if (!arguments.length) return charge;
+ charge = typeof x === "function" ? x : +x;
+ return force;
+ };
+
+ force.gravity = function(x) {
+ if (!arguments.length) return gravity;
+ gravity = x;
+ return force;
+ };
+
+ force.theta = function(x) {
+ if (!arguments.length) return theta;
+ theta = x;
+ return force;
+ };
+
+ force.start = function() {
+ var i,
+ j,
+ n = nodes.length,
+ m = links.length,
+ w = size[0],
+ h = size[1],
+ neighbors,
+ o;
+
+ for (i = 0; i < n; ++i) {
+ (o = nodes[i]).index = i;
+ o.weight = 0;
+ }
+
+ distances = [];
+ strengths = [];
+ for (i = 0; i < m; ++i) {
+ o = links[i];
+ if (typeof o.source == "number") o.source = nodes[o.source];
+ if (typeof o.target == "number") o.target = nodes[o.target];
+ distances[i] = linkDistance.call(this, o, i);
+ strengths[i] = linkStrength.call(this, o, i);
+ ++o.source.weight;
+ ++o.target.weight;
+ }
+
+ for (i = 0; i < n; ++i) {
+ o = nodes[i];
+ if (isNaN(o.x)) o.x = position("x", w);
+ if (isNaN(o.y)) o.y = position("y", h);
+ if (isNaN(o.px)) o.px = o.x;
+ if (isNaN(o.py)) o.py = o.y;
+ }
+
+ charges = [];
+ if (typeof charge === "function") {
+ for (i = 0; i < n; ++i) {
+ charges[i] = +charge.call(this, nodes[i], i);
+ }
+ } else {
+ for (i = 0; i < n; ++i) {
+ charges[i] = charge;
+ }
+ }
+
+ // initialize node position based on first neighbor
+ function position(dimension, size) {
+ var neighbors = neighbor(i),
+ j = -1,
+ m = neighbors.length,
+ x;
+ while (++j < m) if (!isNaN(x = neighbors[j][dimension])) return x;
+ return Math.random() * size;
+ }
+
+ // initialize neighbors lazily
+ function neighbor() {
+ if (!neighbors) {
+ neighbors = [];
+ for (j = 0; j < n; ++j) {
+ neighbors[j] = [];
+ }
+ for (j = 0; j < m; ++j) {
+ var o = links[j];
+ neighbors[o.source.index].push(o.target);
+ neighbors[o.target.index].push(o.source);
+ }
+ }
+ return neighbors[i];
+ }
+
+ return force.resume();
+ };
+
+ force.resume = function() {
+ alpha = .1;
+ d3.timer(tick);
+ return force;
+ };
+
+ force.stop = function() {
+ alpha = 0;
+ return force;
+ };
+
+ // use `node.call(force.drag)` to make nodes draggable
+ force.drag = function() {
+ if (!drag) drag = d3.behavior.drag()
+ .on("dragstart", dragstart)
+ .on("drag", d3_layout_forceDrag)
+ .on("dragend", d3_layout_forceDragEnd);
+
+ this.on("mouseover.force", d3_layout_forceDragOver)
+ .on("mouseout.force", d3_layout_forceDragOut)
+ .call(drag);
+ };
+
+ function dragstart(d) {
+ d3_layout_forceDragOver(d3_layout_forceDragNode = d);
+ d3_layout_forceDragForce = force;
+ }
+
+ return force;
+};
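+
+// Example (sketch): a minimal two-node simulation; start() seeds missing
+// positions and the "tick" listener runs once per relaxation step.
+//
+//   var force = d3.layout.force()
+//       .nodes([{}, {}])
+//       .links([{source: 0, target: 1}])
+//       .size([960, 500])
+//       .on("tick", function() { /* reposition SVG elements here */ })
+//       .start();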
+
+var d3_layout_forceDragForce,
+ d3_layout_forceDragNode;
+
+function d3_layout_forceDragOver(d) {
+ d.fixed |= 2;
+}
+
+function d3_layout_forceDragOut(d) {
+ if (d !== d3_layout_forceDragNode) d.fixed &= 1;
+}
+
+function d3_layout_forceDragEnd() {
+ d3_layout_forceDrag();
+ d3_layout_forceDragNode.fixed &= 1;
+ d3_layout_forceDragForce = d3_layout_forceDragNode = null;
+}
+
+function d3_layout_forceDrag() {
+ d3_layout_forceDragNode.px += d3.event.dx;
+ d3_layout_forceDragNode.py += d3.event.dy;
+ d3_layout_forceDragForce.resume(); // restart annealing
+}
+
+function d3_layout_forceAccumulate(quad, alpha, charges) {
+ var cx = 0,
+ cy = 0;
+ quad.charge = 0;
+ if (!quad.leaf) {
+ var nodes = quad.nodes,
+ n = nodes.length,
+ i = -1,
+ c;
+ while (++i < n) {
+ c = nodes[i];
+ if (c == null) continue;
+ d3_layout_forceAccumulate(c, alpha, charges);
+ quad.charge += c.charge;
+ cx += c.charge * c.cx;
+ cy += c.charge * c.cy;
+ }
+ }
+ if (quad.point) {
+ // jitter internal nodes that are coincident
+ if (!quad.leaf) {
+ quad.point.x += Math.random() - .5;
+ quad.point.y += Math.random() - .5;
+ }
+ var k = alpha * charges[quad.point.index];
+ quad.charge += quad.pointCharge = k;
+ cx += k * quad.point.x;
+ cy += k * quad.point.y;
+ }
+ quad.cx = cx / quad.charge;
+ quad.cy = cy / quad.charge;
+}
+
+function d3_layout_forceLinkDistance(link) {
+ return 20;
+}
+
+function d3_layout_forceLinkStrength(link) {
+ return 1;
+}
+d3.layout.partition = function() {
+ var hierarchy = d3.layout.hierarchy(),
+ size = [1, 1]; // width, height
+
+ function position(node, x, dx, dy) {
+ var children = node.children;
+ node.x = x;
+ node.y = node.depth * dy;
+ node.dx = dx;
+ node.dy = dy;
+ if (children && (n = children.length)) {
+ var i = -1,
+ n,
+ c,
+ d;
+ dx = node.value ? dx / node.value : 0;
+ while (++i < n) {
+ position(c = children[i], x, d = c.value * dx, dy);
+ x += d;
+ }
+ }
+ }
+
+ function depth(node) {
+ var children = node.children,
+ d = 0;
+ if (children && (n = children.length)) {
+ var i = -1,
+ n;
+ while (++i < n) d = Math.max(d, depth(children[i]));
+ }
+ return 1 + d;
+ }
+
+ function partition(d, i) {
+ var nodes = hierarchy.call(this, d, i);
+ position(nodes[0], 0, size[0], size[1] / depth(nodes[0]));
+ return nodes;
+ }
+
+ partition.size = function(x) {
+ if (!arguments.length) return size;
+ size = x;
+ return partition;
+ };
+
+ return d3_layout_hierarchyRebind(partition, hierarchy);
+};
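+
+// Example (sketch): an icicle-style partition; each node gains x, y, dx, dy.
+//
+//   var nodes = d3.layout.partition().size([100, 100])({
+//     children: [{value: 1}, {value: 3}]
+//   });
+//   // the value-3 child spans dx === 75 of the 100-unit width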
+d3.layout.pie = function() {
+ var value = Number,
+ sort = null,
+ startAngle = 0,
+ endAngle = 2 * Math.PI;
+
+ function pie(data, i) {
+
+ // Compute the start angle.
+ var a = +(typeof startAngle === "function"
+ ? startAngle.apply(this, arguments)
+ : startAngle);
+
+ // Compute the angular range (end - start).
+ var k = (typeof endAngle === "function"
+ ? endAngle.apply(this, arguments)
+ : endAngle) - startAngle;
+
+ // Optionally sort the data.
+ var index = d3.range(data.length);
+ if (sort != null) index.sort(function(i, j) {
+ return sort(data[i], data[j]);
+ });
+
+ // Compute the numeric values for each data element.
+ var values = data.map(value);
+
+ // Convert k into a scale factor from value to angle, using the sum.
+ k /= values.reduce(function(p, d) { return p + d; }, 0);
+
+ // Compute the arcs!
+    var arcs = index.map(function(i) {
+      var d = values[i];
+      return {
+        data: data[i],
+        value: d,
+        startAngle: a,
+        endAngle: a += d * k
+      };
+    });
+
+ // Return the arcs in the original data's order.
+ return data.map(function(d, i) {
+ return arcs[index[i]];
+ });
+ }
+
+ /**
+ * Specifies the value function *x*, which returns a nonnegative numeric value
+ * for each datum. The default value function is `Number`. The value function
+ * is passed two arguments: the current datum and the current index.
+ */
+ pie.value = function(x) {
+ if (!arguments.length) return value;
+ value = x;
+ return pie;
+ };
+
+ /**
+ * Specifies a sort comparison operator *x*. The comparator is passed two data
+ * elements from the data array, a and b; it returns a negative value if a is
+ * less than b, a positive value if a is greater than b, and zero if a equals
+ * b.
+ */
+ pie.sort = function(x) {
+ if (!arguments.length) return sort;
+ sort = x;
+ return pie;
+ };
+
+ /**
+ * Specifies the overall start angle of the pie chart. Defaults to 0. The
+ * start angle can be specified either as a constant or as a function; in the
+ * case of a function, it is evaluated once per array (as opposed to per
+ * element).
+ */
+ pie.startAngle = function(x) {
+ if (!arguments.length) return startAngle;
+ startAngle = x;
+ return pie;
+ };
+
+ /**
+ * Specifies the overall end angle of the pie chart. Defaults to 2π. The
+ * end angle can be specified either as a constant or as a function; in the
+ * case of a function, it is evaluated once per array (as opposed to per
+ * element).
+ */
+ pie.endAngle = function(x) {
+ if (!arguments.length) return endAngle;
+ endAngle = x;
+ return pie;
+ };
+
+ return pie;
+};
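+
+// Example (sketch): arc angles for a numeric dataset; each arc exposes
+// startAngle and endAngle in radians, spanning 2π in total.
+//
+//   var arcs = d3.layout.pie()([4, 8, 15, 16, 23, 42]);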
+// Data is a two-dimensional array of x,y values; we populate y0.
+d3.layout.stack = function() {
+ var values = Object,
+ order = d3_layout_stackOrders["default"],
+ offset = d3_layout_stackOffsets["zero"],
+ out = d3_layout_stackOut,
+ x = d3_layout_stackX,
+ y = d3_layout_stackY;
+
+ function stack(data, index) {
+
+ // Convert series to canonical two-dimensional representation.
+ var series = data.map(function(d, i) {
+ return values.call(stack, d, i);
+ });
+
+ // Convert each series to canonical [[x,y]] representation.
+ var points = series.map(function(d, i) {
+ return d.map(function(v, i) {
+ return [x.call(stack, v, i), y.call(stack, v, i)];
+ });
+ });
+
+ // Compute the order of series, and permute them.
+ var orders = order.call(stack, points, index);
+ series = d3.permute(series, orders);
+ points = d3.permute(points, orders);
+
+ // Compute the baseline…
+ var offsets = offset.call(stack, points, index);
+
+ // And propagate it to other series.
+ var n = series.length,
+ m = series[0].length,
+ i,
+ j,
+ o;
+ for (j = 0; j < m; ++j) {
+ out.call(stack, series[0][j], o = offsets[j], points[0][j][1]);
+ for (i = 1; i < n; ++i) {
+ out.call(stack, series[i][j], o += points[i - 1][j][1], points[i][j][1]);
+ }
+ }
+
+ return data;
+ }
+
+ stack.values = function(x) {
+ if (!arguments.length) return values;
+ values = x;
+ return stack;
+ };
+
+ stack.order = function(x) {
+ if (!arguments.length) return order;
+ order = typeof x === "function" ? x : d3_layout_stackOrders[x];
+ return stack;
+ };
+
+ stack.offset = function(x) {
+ if (!arguments.length) return offset;
+ offset = typeof x === "function" ? x : d3_layout_stackOffsets[x];
+ return stack;
+ };
+
+ stack.x = function(z) {
+ if (!arguments.length) return x;
+ x = z;
+ return stack;
+ };
+
+ stack.y = function(z) {
+ if (!arguments.length) return y;
+ y = z;
+ return stack;
+ };
+
+ stack.out = function(z) {
+ if (!arguments.length) return out;
+ out = z;
+ return stack;
+ };
+
+ return stack;
+};
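+
+// Example (sketch): stacking two series of {x, y} points; each point gains
+// a y0 baseline offset.
+//
+//   var stacked = d3.layout.stack()([
+//     [{x: 0, y: 1}, {x: 1, y: 2}],
+//     [{x: 0, y: 3}, {x: 1, y: 1}]
+//   ]);
+//   // stacked[1][0].y0 === 1 (it sits on top of the first series)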
+
+function d3_layout_stackX(d) {
+ return d.x;
+}
+
+function d3_layout_stackY(d) {
+ return d.y;
+}
+
+function d3_layout_stackOut(d, y0, y) {
+ d.y0 = y0;
+ d.y = y;
+}
+
+var d3_layout_stackOrders = {
+
+ "inside-out": function(data) {
+ var n = data.length,
+ i,
+ j,
+ max = data.map(d3_layout_stackMaxIndex),
+ sums = data.map(d3_layout_stackReduceSum),
+ index = d3.range(n).sort(function(a, b) { return max[a] - max[b]; }),
+ top = 0,
+ bottom = 0,
+ tops = [],
+ bottoms = [];
+ for (i = 0; i < n; ++i) {
+ j = index[i];
+ if (top < bottom) {
+ top += sums[j];
+ tops.push(j);
+ } else {
+ bottom += sums[j];
+ bottoms.push(j);
+ }
+ }
+ return bottoms.reverse().concat(tops);
+ },
+
+ "reverse": function(data) {
+ return d3.range(data.length).reverse();
+ },
+
+ "default": function(data) {
+ return d3.range(data.length);
+ }
+
+};
+
+var d3_layout_stackOffsets = {
+
+ "silhouette": function(data) {
+ var n = data.length,
+ m = data[0].length,
+ sums = [],
+ max = 0,
+ i,
+ j,
+ o,
+ y0 = [];
+ for (j = 0; j < m; ++j) {
+ for (i = 0, o = 0; i < n; i++) o += data[i][j][1];
+ if (o > max) max = o;
+ sums.push(o);
+ }
+ for (j = 0; j < m; ++j) {
+ y0[j] = (max - sums[j]) / 2;
+ }
+ return y0;
+ },
+
+ "wiggle": function(data) {
+ var n = data.length,
+ x = data[0],
+ m = x.length,
+ max = 0,
+ i,
+ j,
+ k,
+ s1,
+ s2,
+ s3,
+ dx,
+ o,
+ o0,
+ y0 = [];
+ y0[0] = o = o0 = 0;
+ for (j = 1; j < m; ++j) {
+ for (i = 0, s1 = 0; i < n; ++i) s1 += data[i][j][1];
+ for (i = 0, s2 = 0, dx = x[j][0] - x[j - 1][0]; i < n; ++i) {
+ for (k = 0, s3 = (data[i][j][1] - data[i][j - 1][1]) / (2 * dx); k < i; ++k) {
+ s3 += (data[k][j][1] - data[k][j - 1][1]) / dx;
+ }
+ s2 += s3 * data[i][j][1];
+ }
+ y0[j] = o -= s1 ? s2 / s1 * dx : 0;
+ if (o < o0) o0 = o;
+ }
+ for (j = 0; j < m; ++j) y0[j] -= o0;
+ return y0;
+ },
+
+ "expand": function(data) {
+ var n = data.length,
+ m = data[0].length,
+ k = 1 / n,
+ i,
+ j,
+ o,
+ y0 = [];
+ for (j = 0; j < m; ++j) {
+ for (i = 0, o = 0; i < n; i++) o += data[i][j][1];
+ if (o) for (i = 0; i < n; i++) data[i][j][1] /= o;
+ else for (i = 0; i < n; i++) data[i][j][1] = k;
+ }
+ for (j = 0; j < m; ++j) y0[j] = 0;
+ return y0;
+ },
+
+ "zero": function(data) {
+ var j = -1,
+ m = data[0].length,
+ y0 = [];
+ while (++j < m) y0[j] = 0;
+ return y0;
+ }
+
+};
+
+function d3_layout_stackMaxIndex(array) {
+ var i = 1,
+ j = 0,
+ v = array[0][1],
+ k,
+ n = array.length;
+ for (; i < n; ++i) {
+ if ((k = array[i][1]) > v) {
+ j = i;
+ v = k;
+ }
+ }
+ return j;
+}
+
+function d3_layout_stackReduceSum(d) {
+ return d.reduce(d3_layout_stackSum, 0);
+}
+
+function d3_layout_stackSum(p, d) {
+ return p + d[1];
+}
+d3.layout.histogram = function() {
+ var frequency = true,
+ valuer = Number,
+ ranger = d3_layout_histogramRange,
+ binner = d3_layout_histogramBinSturges;
+
+ function histogram(data, i) {
+ var bins = [],
+ values = data.map(valuer, this),
+ range = ranger.call(this, values, i),
+ thresholds = binner.call(this, range, values, i),
+ bin,
+ i = -1,
+ n = values.length,
+ m = thresholds.length - 1,
+ k = frequency ? 1 : 1 / n,
+ x;
+
+ // Initialize the bins.
+ while (++i < m) {
+ bin = bins[i] = [];
+ bin.dx = thresholds[i + 1] - (bin.x = thresholds[i]);
+ bin.y = 0;
+ }
+
+ // Fill the bins, ignoring values outside the range.
+ i = -1; while(++i < n) {
+ x = values[i];
+ if ((x >= range[0]) && (x <= range[1])) {
+ bin = bins[d3.bisect(thresholds, x, 1, m) - 1];
+ bin.y += k;
+ bin.push(data[i]);
+ }
+ }
+
+ return bins;
+ }
+
+ // Specifies how to extract a value from the associated data. The default
+  // value function is `Number`, which simply coerces each datum to a number.
+ histogram.value = function(x) {
+ if (!arguments.length) return valuer;
+ valuer = x;
+ return histogram;
+ };
+
+ // Specifies the range of the histogram. Values outside the specified range
+ // will be ignored. The argument `x` may be specified either as a two-element
+ // array representing the minimum and maximum value of the range, or as a
+ // function that returns the range given the array of values and the current
+ // index `i`. The default range is the extent (minimum and maximum) of the
+ // values.
+ histogram.range = function(x) {
+ if (!arguments.length) return ranger;
+ ranger = d3.functor(x);
+ return histogram;
+ };
+
+ // Specifies how to bin values in the histogram. The argument `x` may be
+ // specified as a number, in which case the range of values will be split
+ // uniformly into the given number of bins. Or, `x` may be an array of
+ // threshold values, defining the bins; the specified array must contain the
+ // rightmost (upper) value, thus specifying n + 1 values for n bins. Or, `x`
+ // may be a function which is evaluated, being passed the range, the array of
+ // values, and the current index `i`, returning an array of thresholds. The
+ // default bin function will divide the values into uniform bins using
+ // Sturges' formula.
+ histogram.bins = function(x) {
+ if (!arguments.length) return binner;
+ binner = typeof x === "number"
+ ? function(range) { return d3_layout_histogramBinFixed(range, x); }
+ : d3.functor(x);
+ return histogram;
+ };
+
+ // Specifies whether the histogram's `y` value is a count (frequency) or a
+ // probability (density). The default value is true.
+ histogram.frequency = function(x) {
+ if (!arguments.length) return frequency;
+ frequency = !!x;
+ return histogram;
+ };
+
+ return histogram;
+};
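+
+// Example (sketch): binning values with the default Sturges bin count; each
+// bin is an array of its values, annotated with x, dx and y.
+//
+//   var bins = d3.layout.histogram()([1, 2, 2, 3, 8, 9]);
+//   // bins[0].x is the bin's lower bound; bins[0].y its frequency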
+
+function d3_layout_histogramBinSturges(range, values) {
+ return d3_layout_histogramBinFixed(range, Math.ceil(Math.log(values.length) / Math.LN2 + 1));
+}
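+
+// Worked example (illustrative): for n = 100 values, Sturges' formula gives
+// Math.ceil(Math.log(100) / Math.LN2 + 1) = Math.ceil(7.64) = 8 bins.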
+
+function d3_layout_histogramBinFixed(range, n) {
+ var x = -1,
+ b = +range[0],
+ m = (range[1] - b) / n,
+ f = [];
+ while (++x <= n) f[x] = m * x + b;
+ return f;
+}
+
+function d3_layout_histogramRange(values) {
+ return [d3.min(values), d3.max(values)];
+}
+d3.layout.hierarchy = function() {
+ var sort = d3_layout_hierarchySort,
+ children = d3_layout_hierarchyChildren,
+ value = d3_layout_hierarchyValue;
+
+ // Recursively compute the node depth and value.
+ // Also converts the data representation into a standard hierarchy structure.
+ function recurse(data, depth, nodes) {
+ var childs = children.call(hierarchy, data, depth),
+ node = d3_layout_hierarchyInline ? data : {data: data};
+ node.depth = depth;
+ nodes.push(node);
+ if (childs && (n = childs.length)) {
+ var i = -1,
+ n,
+ c = node.children = [],
+ v = 0,
+          j = depth + 1,
+          d;
+ while (++i < n) {
+ d = recurse(childs[i], j, nodes);
+ d.parent = node;
+ c.push(d);
+ v += d.value;
+ }
+ if (sort) c.sort(sort);
+ if (value) node.value = v;
+ } else if (value) {
+ node.value = +value.call(hierarchy, data, depth) || 0;
+ }
+ return node;
+ }
+
+ // Recursively re-evaluates the node value.
+ function revalue(node, depth) {
+ var children = node.children,
+ v = 0;
+ if (children && (n = children.length)) {
+ var i = -1,
+ n,
+ j = depth + 1;
+ while (++i < n) v += revalue(children[i], j);
+ } else if (value) {
+ v = +value.call(hierarchy, d3_layout_hierarchyInline ? node : node.data, depth) || 0;
+ }
+ if (value) node.value = v;
+ return v;
+ }
+
+ function hierarchy(d) {
+ var nodes = [];
+ recurse(d, 0, nodes);
+ return nodes;
+ }
+
+ hierarchy.sort = function(x) {
+ if (!arguments.length) return sort;
+ sort = x;
+ return hierarchy;
+ };
+
+ hierarchy.children = function(x) {
+ if (!arguments.length) return children;
+ children = x;
+ return hierarchy;
+ };
+
+ hierarchy.value = function(x) {
+ if (!arguments.length) return value;
+ value = x;
+ return hierarchy;
+ };
+
+ // Re-evaluates the `value` property for the specified hierarchy.
+ hierarchy.revalue = function(root) {
+ revalue(root, 0);
+ return root;
+ };
+
+ return hierarchy;
+};
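+
+// Example (sketch): flattening a nested object; nodes gain depth, value and
+// parent/children references, with values summed up the tree.
+//
+//   var nodes = d3.layout.hierarchy()({children: [{value: 1}, {value: 2}]});
+//   // nodes[0].depth === 0 and nodes[0].value === 3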
+
+// A method assignment helper for hierarchy subclasses.
+function d3_layout_hierarchyRebind(object, hierarchy) {
+ object.sort = d3.rebind(object, hierarchy.sort);
+ object.children = d3.rebind(object, hierarchy.children);
+ object.links = d3_layout_hierarchyLinks;
+ object.value = d3.rebind(object, hierarchy.value);
+
+  // If the new nodes API is used, enable inlining.
+ object.nodes = function(d) {
+ d3_layout_hierarchyInline = true;
+ return (object.nodes = object)(d);
+ };
+
+ return object;
+}
+
+function d3_layout_hierarchyChildren(d) {
+ return d.children;
+}
+
+function d3_layout_hierarchyValue(d) {
+ return d.value;
+}
+
+function d3_layout_hierarchySort(a, b) {
+ return b.value - a.value;
+}
+
+// Returns an array of source+target objects for the specified nodes.
+function d3_layout_hierarchyLinks(nodes) {
+ return d3.merge(nodes.map(function(parent) {
+ return (parent.children || []).map(function(child) {
+ return {source: parent, target: child};
+ });
+ }));
+}
+
+// For backwards-compatibility, don't enable inlining by default.
+var d3_layout_hierarchyInline = false;
+d3.layout.pack = function() {
+ var hierarchy = d3.layout.hierarchy().sort(d3_layout_packSort),
+ size = [1, 1];
+
+ function pack(d, i) {
+ var nodes = hierarchy.call(this, d, i),
+ root = nodes[0];
+
+ // Recursively compute the layout.
+ root.x = 0;
+ root.y = 0;
+ d3_layout_packTree(root);
+
+ // Scale the layout to fit the requested size.
+ var w = size[0],
+ h = size[1],
+ k = 1 / Math.max(2 * root.r / w, 2 * root.r / h);
+ d3_layout_packTransform(root, w / 2, h / 2, k);
+
+ return nodes;
+ }
+
+ pack.size = function(x) {
+ if (!arguments.length) return size;
+ size = x;
+ return pack;
+ };
+
+ return d3_layout_hierarchyRebind(pack, hierarchy);
+};
+
+function d3_layout_packSort(a, b) {
+ return a.value - b.value;
+}
+
+function d3_layout_packInsert(a, b) {
+ var c = a._pack_next;
+ a._pack_next = b;
+ b._pack_prev = a;
+ b._pack_next = c;
+ c._pack_prev = b;
+}
+
+function d3_layout_packSplice(a, b) {
+ a._pack_next = b;
+ b._pack_prev = a;
+}
+
+function d3_layout_packIntersects(a, b) {
+ var dx = b.x - a.x,
+ dy = b.y - a.y,
+ dr = a.r + b.r;
+ return (dr * dr - dx * dx - dy * dy) > .001; // within epsilon
+}
+
+function d3_layout_packCircle(nodes) {
+ var xMin = Infinity,
+ xMax = -Infinity,
+ yMin = Infinity,
+ yMax = -Infinity,
+ n = nodes.length,
+ a, b, c, j, k;
+
+ function bound(node) {
+ xMin = Math.min(node.x - node.r, xMin);
+ xMax = Math.max(node.x + node.r, xMax);
+ yMin = Math.min(node.y - node.r, yMin);
+ yMax = Math.max(node.y + node.r, yMax);
+ }
+
+ // Create node links.
+ nodes.forEach(d3_layout_packLink);
+
+ // Create first node.
+ a = nodes[0];
+ a.x = -a.r;
+ a.y = 0;
+ bound(a);
+
+ // Create second node.
+ if (n > 1) {
+ b = nodes[1];
+ b.x = b.r;
+ b.y = 0;
+ bound(b);
+
+ // Create third node and build chain.
+ if (n > 2) {
+ c = nodes[2];
+ d3_layout_packPlace(a, b, c);
+ bound(c);
+ d3_layout_packInsert(a, c);
+ a._pack_prev = c;
+ d3_layout_packInsert(c, b);
+ b = a._pack_next;
+
+ // Now iterate through the rest.
+ for (var i = 3; i < n; i++) {
+ d3_layout_packPlace(a, b, c = nodes[i]);
+
+ // Search for the closest intersection.
+ var isect = 0, s1 = 1, s2 = 1;
+ for (j = b._pack_next; j !== b; j = j._pack_next, s1++) {
+ if (d3_layout_packIntersects(j, c)) {
+ isect = 1;
+ break;
+ }
+ }
+ if (isect == 1) {
+ for (k = a._pack_prev; k !== j._pack_prev; k = k._pack_prev, s2++) {
+ if (d3_layout_packIntersects(k, c)) {
+ if (s2 < s1) {
+ isect = -1;
+ j = k;
+ }
+ break;
+ }
+ }
+ }
+
+ // Update node chain.
+ if (isect == 0) {
+ d3_layout_packInsert(a, c);
+ b = c;
+ bound(c);
+ } else if (isect > 0) {
+ d3_layout_packSplice(a, j);
+ b = j;
+ i--;
+ } else { // isect < 0
+ d3_layout_packSplice(j, b);
+ a = j;
+ i--;
+ }
+ }
+ }
+ }
+
+ // Re-center the circles and return the encompassing radius.
+ var cx = (xMin + xMax) / 2,
+ cy = (yMin + yMax) / 2,
+ cr = 0;
+ for (var i = 0; i < n; i++) {
+ var node = nodes[i];
+ node.x -= cx;
+ node.y -= cy;
+ cr = Math.max(cr, node.r + Math.sqrt(node.x * node.x + node.y * node.y));
+ }
+
+ // Remove node links.
+ nodes.forEach(d3_layout_packUnlink);
+
+ return cr;
+}
+
+function d3_layout_packLink(node) {
+ node._pack_next = node._pack_prev = node;
+}
+
+function d3_layout_packUnlink(node) {
+ delete node._pack_next;
+ delete node._pack_prev;
+}
+
+function d3_layout_packTree(node) {
+ var children = node.children;
+ if (children && children.length) {
+ children.forEach(d3_layout_packTree);
+ node.r = d3_layout_packCircle(children);
+ } else {
+ node.r = Math.sqrt(node.value);
+ }
+}
+
+function d3_layout_packTransform(node, x, y, k) {
+ var children = node.children;
+ node.x = (x += k * node.x);
+ node.y = (y += k * node.y);
+ node.r *= k;
+ if (children) {
+ var i = -1, n = children.length;
+ while (++i < n) d3_layout_packTransform(children[i], x, y, k);
+ }
+}
+
+function d3_layout_packPlace(a, b, c) {
+ var db = a.r + c.r,
+ dx = b.x - a.x,
+ dy = b.y - a.y;
+ if (db && (dx || dy)) {
+ var da = b.r + c.r,
+ dc = Math.sqrt(dx * dx + dy * dy),
+ cos = Math.max(-1, Math.min(1, (db * db + dc * dc - da * da) / (2 * db * dc))),
+ theta = Math.acos(cos),
+ x = cos * (db /= dc),
+ y = Math.sin(theta) * db;
+ c.x = a.x + x * dx + y * dy;
+ c.y = a.y + x * dy - y * dx;
+ } else {
+ c.x = a.x + db;
+ c.y = a.y;
+ }
+}
+// Implements a hierarchical layout using the cluster (or dendrogram) algorithm.
+d3.layout.cluster = function() {
+ var hierarchy = d3.layout.hierarchy().sort(null).value(null),
+ separation = d3_layout_treeSeparation,
+ size = [1, 1]; // width, height
+
+ function cluster(d, i) {
+ var nodes = hierarchy.call(this, d, i),
+ root = nodes[0],
+ previousNode,
+ x = 0,
+ kx,
+ ky;
+
+ // First walk, computing the initial x & y values.
+ d3_layout_treeVisitAfter(root, function(node) {
+ var children = node.children;
+ if (children && children.length) {
+ node.x = d3_layout_clusterX(children);
+ node.y = d3_layout_clusterY(children);
+ } else {
+ node.x = previousNode ? x += separation(node, previousNode) : 0;
+ node.y = 0;
+ previousNode = node;
+ }
+ });
+
+ // Compute the left-most, right-most, and depth-most nodes for extents.
+ var left = d3_layout_clusterLeft(root),
+ right = d3_layout_clusterRight(root),
+ x0 = left.x - separation(left, right) / 2,
+ x1 = right.x + separation(right, left) / 2;
+
+ // Second walk, normalizing x & y to the desired size.
+ d3_layout_treeVisitAfter(root, function(node) {
+ node.x = (node.x - x0) / (x1 - x0) * size[0];
+ node.y = (1 - node.y / root.y) * size[1];
+ });
+
+ return nodes;
+ }
+
+ cluster.separation = function(x) {
+ if (!arguments.length) return separation;
+ separation = x;
+ return cluster;
+ };
+
+ cluster.size = function(x) {
+ if (!arguments.length) return size;
+ size = x;
+ return cluster;
+ };
+
+ return d3_layout_hierarchyRebind(cluster, hierarchy);
+};
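+
+// Example (sketch): a dendrogram layout; every leaf ends up at y === size[1]
+// and each internal node at the mean x of its children.
+//
+//   var nodes = d3.layout.cluster().size([100, 100])({
+//     children: [{}, {children: [{}, {}]}]
+//   });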
+
+function d3_layout_clusterY(children) {
+ return 1 + d3.max(children, function(child) {
+ return child.y;
+ });
+}
+
+function d3_layout_clusterX(children) {
+ return children.reduce(function(x, child) {
+ return x + child.x;
+ }, 0) / children.length;
+}
+
+function d3_layout_clusterLeft(node) {
+ var children = node.children;
+ return children && children.length ? d3_layout_clusterLeft(children[0]) : node;
+}
+
+function d3_layout_clusterRight(node) {
+ var children = node.children, n;
+ return children && (n = children.length) ? d3_layout_clusterRight(children[n - 1]) : node;
+}
+// Node-link tree diagram using the Reingold-Tilford "tidy" algorithm
+d3.layout.tree = function() {
+ var hierarchy = d3.layout.hierarchy().sort(null).value(null),
+ separation = d3_layout_treeSeparation,
+ size = [1, 1]; // width, height
+
+ function tree(d, i) {
+ var nodes = hierarchy.call(this, d, i),
+ root = nodes[0];
+
+ function firstWalk(node, previousSibling) {
+ var children = node.children,
+ layout = node._tree;
+ if (children && (n = children.length)) {
+ var n,
+ firstChild = children[0],
+ previousChild,
+ ancestor = firstChild,
+ child,
+ i = -1;
+ while (++i < n) {
+ child = children[i];
+ firstWalk(child, previousChild);
+ ancestor = apportion(child, previousChild, ancestor);
+ previousChild = child;
+ }
+ d3_layout_treeShift(node);
+ var midpoint = .5 * (firstChild._tree.prelim + child._tree.prelim);
+ if (previousSibling) {
+ layout.prelim = previousSibling._tree.prelim + separation(node, previousSibling);
+ layout.mod = layout.prelim - midpoint;
+ } else {
+ layout.prelim = midpoint;
+ }
+ } else {
+ if (previousSibling) {
+ layout.prelim = previousSibling._tree.prelim + separation(node, previousSibling);
+ }
+ }
+ }
+
+ function secondWalk(node, x) {
+ node.x = node._tree.prelim + x;
+ var children = node.children;
+ if (children && (n = children.length)) {
+ var i = -1,
+ n;
+ x += node._tree.mod;
+ while (++i < n) {
+ secondWalk(children[i], x);
+ }
+ }
+ }
+
+ function apportion(node, previousSibling, ancestor) {
+ if (previousSibling) {
+ var vip = node,
+ vop = node,
+ vim = previousSibling,
+ vom = node.parent.children[0],
+ sip = vip._tree.mod,
+ sop = vop._tree.mod,
+ sim = vim._tree.mod,
+ som = vom._tree.mod,
+ shift;
+ while (vim = d3_layout_treeRight(vim), vip = d3_layout_treeLeft(vip), vim && vip) {
+ vom = d3_layout_treeLeft(vom);
+ vop = d3_layout_treeRight(vop);
+ vop._tree.ancestor = node;
+ shift = vim._tree.prelim + sim - vip._tree.prelim - sip + separation(vim, vip);
+ if (shift > 0) {
+ d3_layout_treeMove(d3_layout_treeAncestor(vim, node, ancestor), node, shift);
+ sip += shift;
+ sop += shift;
+ }
+ sim += vim._tree.mod;
+ sip += vip._tree.mod;
+ som += vom._tree.mod;
+ sop += vop._tree.mod;
+ }
+ if (vim && !d3_layout_treeRight(vop)) {
+ vop._tree.thread = vim;
+ vop._tree.mod += sim - sop;
+ }
+ if (vip && !d3_layout_treeLeft(vom)) {
+ vom._tree.thread = vip;
+ vom._tree.mod += sip - som;
+ ancestor = node;
+ }
+ }
+ return ancestor;
+ }
+
+ // Initialize temporary layout variables.
+ d3_layout_treeVisitAfter(root, function(node, previousSibling) {
+ node._tree = {
+ ancestor: node,
+ prelim: 0,
+ mod: 0,
+ change: 0,
+ shift: 0,
+ number: previousSibling ? previousSibling._tree.number + 1 : 0
+ };
+ });
+
+ // Compute the layout using Buchheim et al.'s algorithm.
+ firstWalk(root);
+ secondWalk(root, -root._tree.prelim);
+
+ // Compute the left-most, right-most, and depth-most nodes for extents.
+ var left = d3_layout_treeSearch(root, d3_layout_treeLeftmost),
+ right = d3_layout_treeSearch(root, d3_layout_treeRightmost),
+ deep = d3_layout_treeSearch(root, d3_layout_treeDeepest),
+ x0 = left.x - separation(left, right) / 2,
+ x1 = right.x + separation(right, left) / 2,
+ y1 = deep.depth || 1;
+
+ // Clear temporary layout variables; transform x and y.
+ d3_layout_treeVisitAfter(root, function(node) {
+ node.x = (node.x - x0) / (x1 - x0) * size[0];
+ node.y = node.depth / y1 * size[1];
+ delete node._tree;
+ });
+
+ return nodes;
+ }
+
+ tree.separation = function(x) {
+ if (!arguments.length) return separation;
+ separation = x;
+ return tree;
+ };
+
+ tree.size = function(x) {
+ if (!arguments.length) return size;
+ size = x;
+ return tree;
+ };
+
+ return d3_layout_hierarchyRebind(tree, hierarchy);
+};
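+
+// Example (sketch): a tidy tree layout; x and y are normalized to the given
+// size, so the root sits at y === 0 and the deepest leaves at y === size[1].
+//
+//   var nodes = d3.layout.tree().size([100, 100])({children: [{}, {}]});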
+
+function d3_layout_treeSeparation(a, b) {
+ return a.parent == b.parent ? 1 : 2;
+}
+
+// function d3_layout_treeSeparationRadial(a, b) {
+// return (a.parent == b.parent ? 1 : 2) / a.depth;
+// }
+
+function d3_layout_treeLeft(node) {
+ var children = node.children;
+ return children && children.length ? children[0] : node._tree.thread;
+}
+
+function d3_layout_treeRight(node) {
+ var children = node.children,
+ n;
+ return children && (n = children.length) ? children[n - 1] : node._tree.thread;
+}
+
+function d3_layout_treeSearch(node, compare) {
+ var children = node.children;
+ if (children && (n = children.length)) {
+ var child,
+ n,
+ i = -1;
+ while (++i < n) {
+ if (compare(child = d3_layout_treeSearch(children[i], compare), node) > 0) {
+ node = child;
+ }
+ }
+ }
+ return node;
+}
+
+function d3_layout_treeRightmost(a, b) {
+ return a.x - b.x;
+}
+
+function d3_layout_treeLeftmost(a, b) {
+ return b.x - a.x;
+}
+
+function d3_layout_treeDeepest(a, b) {
+ return a.depth - b.depth;
+}
+
+function d3_layout_treeVisitAfter(node, callback) {
+ function visit(node, previousSibling) {
+ var children = node.children;
+ if (children && (n = children.length)) {
+ var child,
+ previousChild = null,
+ i = -1,
+ n;
+ while (++i < n) {
+ child = children[i];
+ visit(child, previousChild);
+ previousChild = child;
+ }
+ }
+ callback(node, previousSibling);
+ }
+ visit(node, null);
+}
+
+function d3_layout_treeShift(node) {
+ var shift = 0,
+ change = 0,
+ children = node.children,
+ i = children.length,
+ child;
+ while (--i >= 0) {
+ child = children[i]._tree;
+ child.prelim += shift;
+ child.mod += shift;
+ shift += child.shift + (change += child.change);
+ }
+}
+
+function d3_layout_treeMove(ancestor, node, shift) {
+ ancestor = ancestor._tree;
+ node = node._tree;
+ var change = shift / (node.number - ancestor.number);
+ ancestor.change += change;
+ node.change -= change;
+ node.shift += shift;
+ node.prelim += shift;
+ node.mod += shift;
+}
+
+function d3_layout_treeAncestor(vim, node, ancestor) {
+ return vim._tree.ancestor.parent == node.parent
+ ? vim._tree.ancestor
+ : ancestor;
+}
+// Squarified Treemaps by Mark Bruls, Kees Huizing, and Jarke J. van Wijk
+// Modified to support a target aspect ratio by Jeff Heer
+d3.layout.treemap = function() {
+ var hierarchy = d3.layout.hierarchy(),
+ round = Math.round,
+ size = [1, 1], // width, height
+ padding = null,
+ pad = d3_layout_treemapPadNull,
+ sticky = false,
+ stickies,
+ ratio = 0.5 * (1 + Math.sqrt(5)); // golden ratio
+
+ // Compute the area for each child based on value & scale.
+ function scale(children, k) {
+ var i = -1,
+ n = children.length,
+ child,
+ area;
+ while (++i < n) {
+ area = (child = children[i]).value * (k < 0 ? 0 : k);
+ child.area = isNaN(area) || area <= 0 ? 0 : area;
+ }
+ }
+
+ // Recursively arranges the specified node's children into squarified rows.
+ function squarify(node) {
+ var children = node.children;
+ if (children && children.length) {
+ var rect = pad(node),
+ row = [],
+          remaining = children.slice(), // shallow copy, consumed as rows are laid out
+ child,
+ best = Infinity, // the best row score so far
+ score, // the current row score
+ u = Math.min(rect.dx, rect.dy), // initial orientation
+ n;
+ scale(remaining, rect.dx * rect.dy / node.value);
+ row.area = 0;
+ while ((n = remaining.length) > 0) {
+ row.push(child = remaining[n - 1]);
+ row.area += child.area;
+ if ((score = worst(row, u)) <= best) { // continue with this orientation
+ remaining.pop();
+ best = score;
+ } else { // abort, and try a different orientation
+ row.area -= row.pop().area;
+ position(row, u, rect, false);
+ u = Math.min(rect.dx, rect.dy);
+ row.length = row.area = 0;
+ best = Infinity;
+ }
+ }
+ if (row.length) {
+ position(row, u, rect, true);
+ row.length = row.area = 0;
+ }
+ children.forEach(squarify);
+ }
+ }
+
+ // Recursively resizes the specified node's children into existing rows.
+ // Preserves the existing layout!
+ function stickify(node) {
+ var children = node.children;
+ if (children && children.length) {
+ var rect = pad(node),
+          remaining = children.slice(), // shallow copy, consumed as rows are laid out
+ child,
+ row = [];
+ scale(remaining, rect.dx * rect.dy / node.value);
+ row.area = 0;
+ while (child = remaining.pop()) {
+ row.push(child);
+ row.area += child.area;
+ if (child.z != null) {
+ position(row, child.z ? rect.dx : rect.dy, rect, !remaining.length);
+ row.length = row.area = 0;
+ }
+ }
+ children.forEach(stickify);
+ }
+ }
+
+ // Computes the score for the specified row, as the worst aspect ratio.
+ function worst(row, u) {
+ var s = row.area,
+ r,
+ rmax = 0,
+ rmin = Infinity,
+ i = -1,
+ n = row.length;
+ while (++i < n) {
+ if (!(r = row[i].area)) continue;
+ if (r < rmin) rmin = r;
+ if (r > rmax) rmax = r;
+ }
+ s *= s;
+ u *= u;
+ return s
+ ? Math.max((u * rmax * ratio) / s, s / (u * rmin * ratio))
+ : Infinity;
+ }
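+  // Rough worked example for worst(): areas [20, 30] along a side of
+  // length u = 10 with the default ratio of about 1.618 give s = 50,
+  // rmin = 20, rmax = 30, so the score is
+  // max(100 * 30 * 1.618 / 2500, 2500 / (100 * 20 * 1.618)), about 1.94.
+  // Lower scores mean aspect ratios closer to the target, so squarify
+  // keeps growing a row only while the score does not worsen.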
+
+ // Positions the specified row of nodes. Modifies `rect`.
+ function position(row, u, rect, flush) {
+ var i = -1,
+ n = row.length,
+ x = rect.x,
+ y = rect.y,
+ v = u ? round(row.area / u) : 0,
+ o;
+ if (u == rect.dx) { // horizontal subdivision
+ if (flush || v > rect.dy) v = v ? rect.dy : 0; // over+underflow
+ while (++i < n) {
+ o = row[i];
+ o.x = x;
+ o.y = y;
+ o.dy = v;
+ x += o.dx = v ? round(o.area / v) : 0;
+ }
+ o.z = true;
+ o.dx += rect.x + rect.dx - x; // rounding error
+ rect.y += v;
+ rect.dy -= v;
+ } else { // vertical subdivision
+ if (flush || v > rect.dx) v = v ? rect.dx : 0; // over+underflow
+ while (++i < n) {
+ o = row[i];
+ o.x = x;
+ o.y = y;
+ o.dx = v;
+ y += o.dy = v ? round(o.area / v) : 0;
+ }
+ o.z = false;
+ o.dy += rect.y + rect.dy - y; // rounding error
+ rect.x += v;
+ rect.dx -= v;
+ }
+ }
+
+ function treemap(d) {
+ var nodes = stickies || hierarchy(d),
+ root = nodes[0];
+ root.x = 0;
+ root.y = 0;
+ root.dx = size[0];
+ root.dy = size[1];
+ if (stickies) hierarchy.revalue(root);
+ scale([root], root.dx * root.dy / root.value);
+ (stickies ? stickify : squarify)(root);
+ if (sticky) stickies = nodes;
+ return nodes;
+ }
+
+ treemap.size = function(x) {
+ if (!arguments.length) return size;
+ size = x;
+ return treemap;
+ };
+
+ treemap.padding = function(x) {
+ if (!arguments.length) return padding;
+
+ function padFunction(node) {
+ var p = x.call(treemap, node, node.depth);
+ return p == null
+ ? d3_layout_treemapPadNull(node)
+ : d3_layout_treemapPad(node, typeof p === "number" ? [p, p, p, p] : p);
+ }
+
+ function padConstant(node) {
+ return d3_layout_treemapPad(node, x);
+ }
+
+ var type;
+ pad = (padding = x) == null ? d3_layout_treemapPadNull
+ : (type = typeof x) === "function" ? padFunction
+ : type === "number" ? (x = [x, x, x, x], padConstant)
+ : padConstant;
+ return treemap;
+ };
+
+ treemap.round = function(x) {
+ if (!arguments.length) return round != Number;
+ round = x ? Math.round : Number;
+ return treemap;
+ };
+
+ treemap.sticky = function(x) {
+ if (!arguments.length) return sticky;
+ sticky = x;
+ stickies = null;
+ return treemap;
+ };
+
+ treemap.ratio = function(x) {
+ if (!arguments.length) return ratio;
+ ratio = x;
+ return treemap;
+ };
+
+ return d3_layout_hierarchyRebind(treemap, hierarchy);
+};
+
+function d3_layout_treemapPadNull(node) {
+ return {x: node.x, y: node.y, dx: node.dx, dy: node.dy};
+}
+
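+// Applies CSS-style padding [top, right, bottom, left]; if the padding
+// exceeds the rectangle, the rectangle collapses to zero size, centered.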
+function d3_layout_treemapPad(node, padding) {
+ var x = node.x + padding[3],
+ y = node.y + padding[0],
+ dx = node.dx - padding[1] - padding[3],
+ dy = node.dy - padding[0] - padding[2];
+ if (dx < 0) { x += dx / 2; dx = 0; }
+ if (dy < 0) { y += dy / 2; dy = 0; }
+ return {x: x, y: y, dx: dx, dy: dy};
+}
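+
+// A minimal usage sketch (not part of the library): node values become
+// proportional rectangle areas within the given size. The data here is
+// hypothetical; `value` is the hierarchy's default value accessor.
+//
+//   var treemap = d3.layout.treemap().size([400, 300]).padding(2);
+//   var cells = treemap.nodes({children: [
+//     {value: 6}, {value: 3}, {value: 1}
+//   ]});
+//   // each cell now has x, y, dx and dy in pixels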
+})();
diff --git a/media/d3.layout.min.js b/media/d3.layout.min.js
new file mode 100644
index 00000000..c7016b5c
--- /dev/null
+++ b/media/d3.layout.min.js
@@ -0,0 +1 @@
+(function(){function bc(a,b){var c=a.x+b[3],d=a.y+b[0],e=a.dx-b[1]-b[3],f=a.dy-b[0]-b[2];e<0&&(c+=e/2,e=0),f<0&&(d+=f/2,f=0);return{x:c,y:d,dx:e,dy:f}}function bb(a){return{x:a.x,y:a.y,dx:a.dx,dy:a.dy}}function ba(a,b,c){return a._tree.ancestor.parent==b.parent?a._tree.ancestor:c}function _(a,b,c){a=a._tree,b=b._tree;var d=c/(b.number-a.number);a.change+=d,b.change-=d,b.shift+=c,b.prelim+=c,b.mod+=c}function $(a){var b=0,c=0,d=a.children,e=d.length,f;while(--e>=0)f=d[e]._tree,f.prelim+=b,f.mod+=b,b+=f.shift+(c+=f.change)}function Z(a,b){function c(a,d){var e=a.children;if(e&&(i=e.length)){var f,g=null,h=-1,i;while(++h<i)f=e[h],c(f,g),g=f}b(a,d)}c(a,null)}function Y(a,b){return a.depth-b.depth}function X(a,b){return b.x-a.x}function W(a,b){return a.x-b.x}function V(a,b){var c=a.children;if(c&&(e=c.length)){var d,e,f=-1;while(++f<e)b(d=V(c[f],b),a)>0&&(a=d)}return a}function U(a){var b=a.children,c;return b&&(c=b.length)?b[c-1]:a._tree.thread}function T(a){var b=a.children;return b&&b.length?b[0]:a._tree.thread}function S(a,b){return a.parent==b.parent?1:2}function R(a){var b=a.children,c;return b&&(c=b.length)?R(b[c-1]):a}function Q(a){var b=a.children;return b&&b.length?Q(b[0]):a}function P(a){return a.reduce(function(a,b){return a+b.x},0)/a.length}function O(a){return 1+d3.max(a,function(a){return a.y})}function N(a,b,c){var d=a.r+c.r,e=b.x-a.x,f=b.y-a.y;if(d&&(e||f)){var g=b.r+c.r,h=Math.sqrt(e*e+f*f),i=Math.max(-1,Math.min(1,(d*d+h*h-g*g)/(2*d*h))),j=Math.acos(i),k=i*(d/=h),l=Math.sin(j)*d;c.x=a.x+k*e+l*f,c.y=a.y+k*f-l*e}else c.x=a.x+d,c.y=a.y}function M(a,b,c,d){var e=a.children;a.x=b+=d*a.x,a.y=c+=d*a.y,a.r*=d;if(e){var f=-1,g=e.length;while(++f<g)M(e[f],b,c,d)}}function L(a){var b=a.children;b&&b.length?(b.forEach(L),a.r=I(b)):a.r=Math.sqrt(a.value)}function K(a){delete a._pack_next,delete a._pack_prev}function J(a){a._pack_next=a._pack_prev=a}function I(a){function l(a){b=Math.min(a.x-a.r,b),c=Math.max(a.x+a.r,c),d=Math.min(a.y-a.r,d),e=Math.max(a.y+a.r,e)}var b=Infinity,c=-Infinity,d=Infinity,e=-Infinity,f=a.length,g,h,i,j,k;a.forEach(J),g=a[0],g.x=-g.r,g.y=0,l(g);if(f>1){h=a[1],h.x=h.r,h.y=0,l(h);if(f>2){i=a[2],N(g,h,i),l(i),F(g,i),g._pack_prev=i,F(i,h),h=g._pack_next;for(var m=3;m<f;m++){N(g,h,i=a[m]);var n=0,o=1,p=1;for(j=h._pack_next;j!==h;j=j._pack_next,o++)if(H(j,i)){n=1;break}if(n==1)for(k=g._pack_prev;k!==j._pack_prev;k=k._pack_prev,p++)if(H(k,i)){p<o&&(n=-1,j=k);break}n==0?(F(g,i),h=i,l(i)):n>0?(G(g,j),h=j,m--):(G(j,h),g=j,m--)}}}var q=(b+c)/2,r=(d+e)/2,s=0;for(var m=0;m<f;m++){var t=a[m];t.x-=q,t.y-=r,s=Math.max(s,t.r+Math.sqrt(t.x*t.x+t.y*t.y))}a.forEach(K);return s}function H(a,b){var c=b.x-a.x,d=b.y-a.y,e=a.r+b.r;return e*e-c*c-d*d>.001}function G(a,b){a._pack_next=b,b._pack_prev=a}function F(a,b){var c=a._pack_next;a._pack_next=b,b._pack_prev=a,b._pack_next=c,c._pack_prev=b}function E(a,b){return a.value-b.value}function C(a){return d3.merge(a.map(function(a){return(a.children||[]).map(function(b){return{source:a,target:b}})}))}function B(a,b){return b.value-a.value}function A(a){return a.value}function z(a){return a.children}function y(a,b){a.sort=d3.rebind(a,b.sort),a.children=d3.rebind(a,b.children),a.links=C,a.value=d3.rebind(a,b.value),a.nodes=function(b){D=!0;return(a.nodes=a)(b)};return a}function x(a){return[d3.min(a),d3.max(a)]}function w(a,b){var c=-1,d=+a[0],e=(a[1]-d)/b,f=[];while(++c<=b)f[c]=e*c+d;return f}function v(a,b){return w(a,Math.ceil(Math.log(b.length)/Math.LN2+1))}function u(a,b){return a+b[1]}function t(a){return a.reduce(u,0)}function 
s(a){var b=1,c=0,d=a[0][1],e,f=a.length;for(;b<f;++b)(e=a[b][1])>d&&(c=b,d=e);return c}function p(a,b,c){a.y0=b,a.y=c}function o(a){return a.y}function n(a){return a.x}function m(a){return 1}function l(a){return 20}function k(a,b,c){var d=0,e=0;a.charge=0;if(!a.leaf){var f=a.nodes,g=f.length,h=-1,i;while(++h<g){i=f[h];if(i==null)continue;k(i,b,c),a.charge+=i.charge,d+=i.charge*i.cx,e+=i.charge*i.cy}}if(a.point){a.leaf||(a.point.x+=Math.random()-.5,a.point.y+=Math.random()-.5);var j=b*c[a.point.index];a.charge+=a.pointCharge=j,d+=j*a.point.x,e+=j*a.point.y}a.cx=d/a.charge,a.cy=e/a.charge}function j(){f.px+=d3.event.dx,f.py+=d3.event.dy,e.resume()}function i(){j(),f.fixed&=1,e=f=null}function h(a){a!==f&&(a.fixed&=1)}function g(a){a.fixed|=2}function c(a,c){if(a===c)return a;var d=b(a),e=b(c),f=d.pop(),g=e.pop(),h=null;while(f===g)h=f,f=d.pop(),g=e.pop();return h}function b(a){var b=[],c=a.parent;while(c!=null)b.push(a),a=c,c=c.parent;b.push(a);return b}function a(a){var b=a.source,d=a.target,e=c(b,d),f=[b];while(b!==e)b=b.parent,f.push(b);var g=f.length;while(d!==e)f.splice(g,0,d),d=d.parent;return f}d3.layout={},d3.layout.bundle=function(){return function(b){var c=[],d=-1,e=b.length;while(++d<e)c.push(a(b[d]));return c}},d3.layout.chord=function(){function k(){b.sort(function(a,b){return i(a.target.value,b.target.value)})}function j(){var a={},j=[],l=d3.range(e),m=[],n,o,p,q,r;b=[],c=[],n=0,q=-1;while(++q<e){o=0,r=-1;while(++r<e)o+=d[q][r];j.push(o),m.push(d3.range(e)),n+=o}g&&l.sort(function(a,b){return g(j[a],j[b])}),h&&m.forEach(function(a,b){a.sort(function(a,c){return h(d[b][a],d[b][c])})}),n=(2*Math.PI-f*e)/n,o=0,q=-1;while(++q<e){p=o,r=-1;while(++r<e){var s=l[q],t=m[q][r],u=d[s][t];a[s+"-"+t]={index:s,subindex:t,startAngle:o,endAngle:o+=u*n,value:u}}c.push({index:s,startAngle:p,endAngle:o,value:(o-p)/n}),o+=f}q=-1;while(++q<e){r=q-1;while(++r<e){var v=a[q+"-"+r],w=a[r+"-"+q];(v.value||w.value)&&b.push(v.value<w.value?{source:w,target:v}:{source:v,target:w})}}i&&k()}var a={},b,c,d,e,f=0,g,h,i;a.matrix=function(f){if(!arguments.length)return d;e=(d=f)&&d.length,b=c=null;return a},a.padding=function(d){if(!arguments.length)return f;f=d,b=c=null;return a},a.sortGroups=function(d){if(!arguments.length)return g;g=d,b=c=null;return a},a.sortSubgroups=function(c){if(!arguments.length)return h;h=c,b=null;return a},a.sortChords=function(c){if(!arguments.length)return i;i=c,b&&k();return a},a.chords=function(){b||j();return b},a.groups=function(){c||j();return c};return a},d3.layout.force=function(){function C(b){g(f=b),e=a}function B(){var a=v.length,d=w.length,e,f,g,h,i,j,l,m,p;for(f=0;f<d;++f){g=w[f],h=g.source,i=g.target,m=i.x-h.x,p=i.y-h.y;if(j=m*m+p*p)j=n*y[f]*((j=Math.sqrt(j))-x[f])/j,m*=j,p*=j,i.x-=m*(l=h.weight/(i.weight+h.weight)),i.y-=p*l,h.x+=m*(l=1-l),h.y+=p*l}if(l=n*s){m=c[0]/2,p=c[1]/2,f=-1;if(l)while(++f<a)g=v[f],g.x+=(m-g.x)*l,g.y+=(p-g.y)*l}if(r){k(e=d3.geom.quadtree(v),n,z),f=-1;while(++f<a)(g=v[f]).fixed||e.visit(A(g))}f=-1;while(++f<a)g=v[f],g.fixed?(g.x=g.px,g.y=g.py):(g.x-=(g.px-(g.px=g.x))*o,g.y-=(g.py-(g.py=g.y))*o);b.tick.dispatch({type:"tick",alpha:n});return(n*=.99)<.005}function A(a){return function(b,c,d,e,f){if(b.point!==a){var g=b.cx-a.x,h=b.cy-a.y,i=1/Math.sqrt(g*g+h*h);if((e-c)*i<t){var j=b.charge*i*i;a.px-=g*j,a.py-=h*j;return!0}if(b.point&&isFinite(i)){var j=b.pointCharge*i*i;a.px-=g*j,a.py-=h*j}}return!b.charge}}var a={},b=d3.dispatch("tick"),c=[1,1],d,n,o=.9,p=l,q=m,r=-30,s=.1,t=.8,u,v=[],w=[],x,y,z;a.on=function(c,d){b[c].add(d);return 
a},a.nodes=function(b){if(!arguments.length)return v;v=b;return a},a.links=function(b){if(!arguments.length)return w;w=b;return a},a.size=function(b){if(!arguments.length)return c;c=b;return a},a.linkDistance=function(b){if(!arguments.length)return p;p=d3.functor(b);return a},a.distance=a.linkDistance,a.linkStrength=function(b){if(!arguments.length)return q;q=d3.functor(b);return a},a.friction=function(b){if(!arguments.length)return o;o=b;return a},a.charge=function(b){if(!arguments.length)return r;r=typeof b=="function"?b:+b;return a},a.gravity=function(b){if(!arguments.length)return s;s=b;return a},a.theta=function(b){if(!arguments.length)return t;t=b;return a},a.start=function(){function l(){if(!i){i=[];for(d=0;d<e;++d)i[d]=[];for(d=0;d<f;++d){var a=w[d];i[a.source.index].push(a.target),i[a.target.index].push(a.source)}}return i[b]}function k(a,c){var d=l(b),e=-1,f=d.length,g;while(++e<f)if(!isNaN(g=d[e][a]))return g;return Math.random()*c}var b,d,e=v.length,f=w.length,g=c[0],h=c[1],i,j;for(b=0;b<e;++b)(j=v[b]).index=b,j.weight=0;x=[],y=[];for(b=0;b<f;++b)j=w[b],typeof j.source=="number"&&(j.source=v[j.source]),typeof j.target=="number"&&(j.target=v[j.target]),x[b]=p.call(this,j,b),y[b]=q.call(this,j,b),++j.source.weight,++j.target.weight;for(b=0;b<e;++b)j=v[b],isNaN(j.x)&&(j.x=k("x",g)),isNaN(j.y)&&(j.y=k("y",h)),isNaN(j.px)&&(j.px=j.x),isNaN(j.py)&&(j.py=j.y);z=[];if(typeof r=="function")for(b=0;b<e;++b)z[b]=+r.call(this,v[b],b);else for(b=0;b<e;++b)z[b]=r;return a.resume()},a.resume=function(){n=.1,d3.timer(B);return a},a.stop=function(){n=0;return a},a.drag=function(){d||(d=d3.behavior.drag().on("dragstart",C).on("drag",j).on("dragend",i)),this.on("mouseover.force",g).on("mouseout.force",h).call(d)};return a};var e,f;d3.layout.partition=function(){function e(e,f){var g=a.call(this,e,f);c(g[0],0,b[0],b[1]/d(g[0]));return g}function d(a){var b=a.children,c=0;if(b&&(f=b.length)){var e=-1,f;while(++e<f)c=Math.max(c,d(b[e]))}return 1+c}function c(a,b,d,e){var f=a.children;a.x=b,a.y=a.depth*e,a.dx=d,a.dy=e;if(f&&(h=f.length)){var g=-1,h,i,j;d=a.value?d/a.value:0;while(++g<h)c(i=f[g],b,j=i.value*d,e),b+=j}}var a=d3.layout.hierarchy(),b=[1,1];e.size=function(a){if(!arguments.length)return b;b=a;return e};return y(e,a)},d3.layout.pie=function(){function f(f,g){var h=+(typeof c=="function"?c.apply(this,arguments):c),i=(typeof e=="function"?e.apply(this,arguments):e)-c,j=d3.range(f.length);b!=null&&j.sort(function(a,c){return b(f[a],f[c])});var k=f.map(a);i/=k.reduce(function(a,b){return a+b},0);var l=j.map(function(a){return{data:f[a],value:d=k[a],startAngle:h,endAngle:h+=d*i}});return f.map(function(a,b){return l[j[b]]})}var a=Number,b=null,c=0,e=2*Math.PI;f.value=function(b){if(!arguments.length)return a;a=b;return f},f.sort=function(a){if(!arguments.length)return b;b=a;return f},f.startAngle=function(a){if(!arguments.length)return c;c=a;return f},f.endAngle=function(a){if(!arguments.length)return e;e=a;return f};return f},d3.layout.stack=function(){function g(h,i){var j=h.map(function(b,c){return a.call(g,b,c)}),k=j.map(function(a,b){return a.map(function(a,b){return[e.call(g,a,b),f.call(g,a,b)]})}),l=b.call(g,k,i);j=d3.permute(j,l),k=d3.permute(k,l);var m=c.call(g,k,i),n=j.length,o=j[0].length,p,q,r;for(q=0;q<o;++q){d.call(g,j[0][q],r=m[q],k[0][q][1]);for(p=1;p<n;++p)d.call(g,j[p][q],r+=k[p-1][q][1],k[p][q][1])}return h}var a=Object,b=q["default"],c=r.zero,d=p,e=n,f=o;g.values=function(b){if(!arguments.length)return a;a=b;return g},g.order=function(a){if(!arguments.length)return 
b;b=typeof a=="function"?a:q[a];return g},g.offset=function(a){if(!arguments.length)return c;c=typeof a=="function"?a:r[a];return g},g.x=function(a){if(!arguments.length)return e;e=a;return g},g.y=function(a){if(!arguments.length)return f;f=a;return g},g.out=function(a){if(!arguments.length)return d;d=a;return g};return g};var q={"inside-out":function(a){var b=a.length,c,d,e=a.map(s),f=a.map(t),g=d3.range(b).sort(function(a,b){return e[a]-e[b]}),h=0,i=0,j=[],k=[];for(c=0;c<b;++c)d=g[c],h<i?(h+=f[d],j.push(d)):(i+=f[d],k.push(d));return k.reverse().concat(j)},reverse:function(a){return d3.range(a.length).reverse()},"default":function(a){return d3.range(a.length)}},r={silhouette:function(a){var b=a.length,c=a[0].length,d=[],e=0,f,g,h,i=[];for(g=0;g<c;++g){for(f=0,h=0;f<b;f++)h+=a[f][g][1];h>e&&(e=h),d.push(h)}for(g=0;g<c;++g)i[g]=(e-d[g])/2;return i},wiggle:function(a){var b=a.length,c=a[0],d=c.length,e=0,f,g,h,i,j,k,l,m,n,o=[];o[0]=m=n=0;for(g=1;g<d;++g){for(f=0,i=0;f<b;++f)i+=a[f][g][1];for(f=0,j=0,l=c[g][0]-c[g-1][0];f<b;++f){for(h=0,k=(a[f][g][1]-a[f][g-1][1])/(2*l);h<f;++h)k+=(a[h][g][1]-a[h][g-1][1])/l;j+=k*a[f][g][1]}o[g]=m-=i?j/i*l:0,m<n&&(n=m)}for(g=0;g<d;++g)o[g]-=n;return o},expand:function(a){var b=a.length,c=a[0].length,d=1/b,e,f,g,h=[];for(f=0;f<c;++f){for(e=0,g=0;e<b;e++)g+=a[e][f][1];if(g)for(e=0;e<b;e++)a[e][f][1]/=g;else for(e=0;e<b;e++)a[e][f][1]=d}for(f=0;f<c;++f)h[f]=0;return h},zero:function(a){var b=-1,c=a[0].length,d=[];while(++b<c)d[b]=0;return d}};d3.layout.histogram=function(){function e(e,f){var g=[],h=e.map(b,this),i=c.call(this,h,f),j=d.call(this,i,h,f),k,f=-1,l=h.length,m=j.length-1,n=a?1:1/l,o;while(++f<m)k=g[f]=[],k.dx=j[f+1]-(k.x=j[f]),k.y=0;f=-1;while(++f<l)o=h[f],o>=i[0]&&o<=i[1]&&(k=g[d3.bisect(j,o,1,m)-1],k.y+=n,k.push(e[f]));return g}var a=!0,b=Number,c=x,d=v;e.value=function(a){if(!arguments.length)return b;b=a;return e},e.range=function(a){if(!arguments.length)return c;c=d3.functor(a);return e},e.bins=function(a){if(!arguments.length)return d;d=typeof a=="number"?function(b){return w(b,a)}:d3.functor(a);return e},e.frequency=function(b){if(!arguments.length)return a;a=!!b;return e};return e},d3.layout.hierarchy=function(){function g(a){var b=[];e(a,0,b);return b}function f(a,b){var d=a.children,e=0;if(d&&(i=d.length)){var h=-1,i,j=b+1;while(++h<i)e+=f(d[h],j)}else c&&(e=+c.call(g,D?a:a.data,b)||0);c&&(a.value=e);return e}function e(f,h,i){var j=b.call(g,f,h),k=D?f:{data:f};k.depth=h,i.push(k);if(j&&(m=j.length)){var l=-1,m,n=k.children=[],o=0,p=h+1;while(++l<m)d=e(j[l],p,i),d.parent=k,n.push(d),o+=d.value;a&&n.sort(a),c&&(k.value=o)}else c&&(k.value=+c.call(g,f,h)||0);return k}var a=B,b=z,c=A;g.sort=function(b){if(!arguments.length)return a;a=b;return g},g.children=function(a){if(!arguments.length)return b;b=a;return g},g.value=function(a){if(!arguments.length)return c;c=a;return g},g.revalue=function(a){f(a,0);return a};return g};var D=!1;d3.layout.pack=function(){function c(c,d){var e=a.call(this,c,d),f=e[0];f.x=0,f.y=0,L(f);var g=b[0],h=b[1],i=1/Math.max(2*f.r/g,2*f.r/h);M(f,g/2,h/2,i);return e}var a=d3.layout.hierarchy().sort(E),b=[1,1];c.size=function(a){if(!arguments.length)return b;b=a;return c};return y(c,a)},d3.layout.cluster=function(){function d(d,e){var f=a.call(this,d,e),g=f[0],h,i=0,j,k;Z(g,function(a){var c=a.children;c&&c.length?(a.x=P(c),a.y=O(c)):(a.x=h?i+=b(a,h):0,a.y=0,h=a)});var l=Q(g),m=R(g),n=l.x-b(l,m)/2,o=m.x+b(m,l)/2;Z(g,function(a){a.x=(a.x-n)/(o-n)*c[0],a.y=(1-a.y/g.y)*c[1]});return f}var 
a=d3.layout.hierarchy().sort(null).value(null),b=S,c=[1,1];d.separation=function(a){if(!arguments.length)return b;b=a;return d},d.size=function(a){if(!arguments.length)return c;c=a;return d};return y(d,a)},d3.layout.tree=function(){function d(d,e){function j(a,c,d){if(c){var e=a,f=a,g=c,h=a.parent.children[0],i=e._tree.mod,j=f._tree.mod,k=g._tree.mod,l=h._tree.mod,m;while(g=U(g),e=T(e),g&&e)h=T(h),f=U(f),f._tree.ancestor=a,m=g._tree.prelim+k-e._tree.prelim-i+b(g,e),m>0&&(_(ba(g,a,d),a,m),i+=m,j+=m),k+=g._tree.mod,i+=e._tree.mod,l+=h._tree.mod,j+=f._tree.mod;g&&!U(f)&&(f._tree.thread=g,f._tree.mod+=k-j),e&&!T(h)&&(h._tree.thread=e,h._tree.mod+=i-l,d=a)}return d}function i(a,b){a.x=a._tree.prelim+b;var c=a.children;if(c&&(e=c.length)){var d=-1,e;b+=a._tree.mod;while(++d<e)i(c[d],b)}}function h(a,c){var d=a.children,e=a._tree;if(d&&(f=d.length)){var f,g=d[0],i,k=g,l,m=-1;while(++m<f)l=d[m],h(l,i),k=j(l,i,k),i=l;$(a);var n=.5*(g._tree.prelim+l._tree.prelim);c?(e.prelim=c._tree.prelim+b(a,c),e.mod=e.prelim-n):e.prelim=n}else c&&(e.prelim=c._tree.prelim+b(a,c))}var f=a.call(this,d,e),g=f[0];Z(g,function(a,b){a._tree={ancestor:a,prelim:0,mod:0,change:0,shift:0,number:b?b._tree.number+1:0}}),h(g),i(g,-g._tree.prelim);var k=V(g,X),l=V(g,W),m=V(g,Y),n=k.x-b(k,l)/2,o=l.x+b(l,k)/2,p=m.depth||1;Z(g,function(a){a.x=(a.x-n)/(o-n)*c[0],a.y=a.depth/p*c[1],delete a._tree});return f}var a=d3.layout.hierarchy().sort(null).value(null),b=S,c=[1,1];d.separation=function(a){if(!arguments.length)return b;b=a;return d},d.size=function(a){if(!arguments.length)return c;c=a;return d};return y(d,a)},d3.layout.treemap=function(){function n(b){var d=g||a(b),e=d[0];e.x=0,e.y=0,e.dx=c[0],e.dy=c[1],g&&a.revalue(e),i([e],e.dx*e.dy/e.value),(g?k:j)(e),f&&(g=d);return d}function m(a,c,d,e){var f=-1,g=a.length,h=d.x,i=d.y,j=c?b(a.area/c):0,k;if(c==d.dx){if(e||j>d.dy)j=j?d.dy:0;while(++f<g)k=a[f],k.x=h,k.y=i,k.dy=j,h+=k.dx=j?b(k.area/j):0;k.z=!0,k.dx+=d.x+d.dx-h,d.y+=j,d.dy-=j}else{if(e||j>d.dx)j=j?d.dx:0;while(++f<g)k=a[f],k.x=h,k.y=i,k.dx=j,i+=k.dy=j?b(k.area/j):0;k.z=!1,k.dy+=d.y+d.dy-i,d.x+=j,d.dx-=j}}function l(a,b){var c=a.area,d,e=0,f=Infinity,g=-1,i=a.length;while(++g<i){if(!(d=a[g].area))continue;d<f&&(f=d),d>e&&(e=d)}c*=c,b*=b;return c?Math.max(b*e*h/c,c/(b*f*h)):Infinity}function k(a){var b=a.children;if(b&&b.length){var c=e(a),d=b.slice(),f,g=[];i(d,c.dx*c.dy/a.value),g.area=0;while(f=d.pop())g.push(f),g.area+=f.area,f.z!=null&&(m(g,f.z?c.dx:c.dy,c,!d.length),g.length=g.area=0);b.forEach(k)}}function j(a){var b=a.children;if(b&&b.length){var c=e(a),d=[],f=b.slice(),g,h=Infinity,k,n=Math.min(c.dx,c.dy),o;i(f,c.dx*c.dy/a.value),d.area=0;while((o=f.length)>0)d.push(g=f[o-1]),d.area+=g.area,(k=l(d,n))<=h?(f.pop(),h=k):(d.area-=d.pop().area,m(d,n,c,!1),n=Math.min(c.dx,c.dy),d.length=d.area=0,h=Infinity);d.length&&(m(d,n,c,!0),d.length=d.area=0),b.forEach(j)}}function i(a,b){var c=-1,d=a.length,e,f;while(++c<d)f=(e=a[c]).value*(b<0?0:b),e.area=isNaN(f)||f<=0?0:f}var a=d3.layout.hierarchy(),b=Math.round,c=[1,1],d=null,e=bb,f=!1,g,h=.5*(1+Math.sqrt(5));n.size=function(a){if(!arguments.length)return c;c=a;return n},n.padding=function(a){function c(b){return bc(b,a)}function b(b){var c=a.call(n,b,b.depth);return c==null?bb(b):bc(b,typeof c=="number"?[c,c,c,c]:c)}if(!arguments.length)return d;var f;e=(d=a)==null?bb:(f=typeof a)==="function"?b:f==="number"?(a=[a,a,a,a],c):c;return n},n.round=function(a){if(!arguments.length)return b!=Number;b=a?Math.round:Number;return n},n.sticky=function(a){if(!arguments.length)return 
f;f=a,g=null;return n},n.ratio=function(a){if(!arguments.length)return h;h=a;return n};return y(n,a)}})() \ No newline at end of file
diff --git a/media/d3.min.js b/media/d3.min.js
new file mode 100644
index 00000000..8932b1a0
--- /dev/null
+++ b/media/d3.min.js
@@ -0,0 +1,2 @@
+(function(){function dE(a,b,c){function i(a,b){var c=a.__domain||(a.__domain=a.domain()),d=a.range().map(function(a){return(a-b)/h});a.domain(c).domain(d.map(a.invert))}var d=Math.pow(2,(dq[2]=a)-c[2]),e=dq[0]=b[0]-d*c[0],f=dq[1]=b[1]-d*c[1],g=d3.event,h=Math.pow(2,a);d3.event={scale:h,translate:[e,f],transform:function(a,b){a&&i(a,e),b&&i(b,f)}};try{dr.apply(dt,du)}finally{d3.event=g}g.preventDefault()}function dD(){dw&&ds===d3.event.target&&(d3.event.stopPropagation(),d3.event.preventDefault(),dw=!1,ds=null)}function dC(){dl&&(dv&&ds===d3.event.target&&(dw=!0),dB(),dl=null)}function dB(){dm=null,dl&&(dv=!0,dE(dq[2],d3.svg.mouse(dt),dl))}function dA(){var a=d3.svg.touches(dt);switch(a.length){case 1:var b=a[0];dE(dq[2],b,dn[b.identifier]);break;case 2:var c=a[0],d=a[1],e=[(c[0]+d[0])/2,(c[1]+d[1])/2],f=dn[c.identifier],g=dn[d.identifier],h=[(f[0]+g[0])/2,(f[1]+g[1])/2,f[2]];dE(Math.log(d3.event.scale)/Math.LN2+f[2],e,h)}}function dz(){var a=d3.svg.touches(dt),b=-1,c=a.length,d;while(++b<c)dn[(d=a[b]).identifier]=dx(d);return a}function dy(){dk||(dk=d3.select("body").append("div").style("visibility","hidden").style("top",0).style("height",0).style("width",0).style("overflow-y","scroll").append("div").style("height","2000px").node().parentNode);var a=d3.event,b;try{dk.scrollTop=1e3,dk.dispatchEvent(a),b=1e3-dk.scrollTop}catch(c){b=a.wheelDelta||-a.detail*5}return b*.005}function dx(a){return[a[0]-dq[0],a[1]-dq[1],dq[2]]}function dj(){d3.event.stopPropagation(),d3.event.preventDefault()}function di(){dd&&c$===d3.event.target&&(dj(),dd=!1,c$=null)}function dh(){!c_||(de("dragend"),c_=null,dc&&c$===d3.event.target&&(dd=!0,dj()))}function dg(){if(!!c_){var a=c_.parentNode;if(!a)return dh();de("drag"),dj()}}function df(a){return d3.event.touches?d3.svg.touches(a)[0]:d3.svg.mouse(a)}function de(a){var b=d3.event,c=c_.parentNode,d=0,e=0;c&&(c=df(c),d=c[0]-db[0],e=c[1]-db[1],db=c,dc|=d|e);try{d3.event={dx:d,dy:e},cZ[a].dispatch.apply(c_,da)}finally{d3.event=b}b.preventDefault()}function cY(a,b,c){e=[];if(c&&b.length>1){var d=bC(a.domain()),e,f=-1,g=b.length,h=(b[1]-b[0])/++c,i,j;while(++f<g)for(i=c;--i>0;)(j=+b[f]-i*h)>=d[0]&&e.push(j);for(--f,i=0;++i<c&&(j=+b[f]+i*h)<d[1];)e.push(j)}return e}function cX(a,b){a.attr("transform",function(a){return"translate(0,"+b(a)+")"})}function cW(a,b){a.attr("transform",function(a){return"translate("+b(a)+",0)"})}function cS(){return"circle"}function cR(){return 64}function cQ(a,b){var c=(a.ownerSVGElement||a).createSVGPoint();if(cP<0&&(window.scrollX||window.scrollY)){var d=d3.select(document.body).append("svg:svg").style("position","absolute").style("top",0).style("left",0),e=d[0][0].getScreenCTM();cP=!e.f&&!e.e,d.remove()}cP?(c.x=b.pageX,c.y=b.pageY):(c.x=b.clientX,c.y=b.clientY),c=c.matrixTransform(a.getScreenCTM().inverse());return[c.x,c.y]}function cO(a){return function(){var b=a.apply(this,arguments),c=b[0],d=b[1]+b$;return[c*Math.cos(d),c*Math.sin(d)]}}function cN(a){return[a.x,a.y]}function cM(a){return a.endAngle}function cL(a){return a.startAngle}function cK(a){return a.radius}function cJ(a){return a.target}function cI(a){return a.source}function cH(a){return function(b,c){return a[c][1]}}function cG(a){return function(b,c){return a[c][0]}}function cF(a){function j(f){if(f.length<1)return null;var j=cf(this,f,b,d),k=cf(this,f,b===c?cG(j):c,d===e?cH(j):e);return"M"+g(a(k),i)+"L"+h(a(j.reverse()),i)+"Z"}var b=cg,c=cg,d=0,e=ch,f,g,h,i=.7;j.x=function(a){if(!arguments.length)return c;b=c=a;return j},j.x0=function(a){if(!arguments.length)return 
b;b=a;return j},j.x1=function(a){if(!arguments.length)return c;c=a;return j},j.y=function(a){if(!arguments.length)return e;d=e=a;return j},j.y0=function(a){if(!arguments.length)return d;d=a;return j},j.y1=function(a){if(!arguments.length)return e;e=a;return j},j.interpolate=function(a){if(!arguments.length)return f;g=ci[f=a],h=g.reverse||g;return j},j.tension=function(a){if(!arguments.length)return i;i=a;return j};return j.interpolate("linear")}function cE(a){var b,c=-1,d=a.length,e,f;while(++c<d)b=a[c],e=b[0],f=b[1]+b$,b[0]=e*Math.cos(f),b[1]=e*Math.sin(f);return a}function cD(a){return a.length<3?cj(a):a[0]+cp(a,cC(a))}function cC(a){var b=[],c,d,e,f,g=cB(a),h=-1,i=a.length-1;while(++h<i)c=cA(a[h],a[h+1]),Math.abs(c)<1e-6?g[h]=g[h+1]=0:(d=g[h]/c,e=g[h+1]/c,f=d*d+e*e,f>9&&(f=c*3/Math.sqrt(f),g[h]=f*d,g[h+1]=f*e));h=-1;while(++h<=i)f=(a[Math.min(i,h+1)][0]-a[Math.max(0,h-1)][0])/(6*(1+g[h]*g[h])),b.push([f||0,g[h]*f||0]);return b}function cB(a){var b=0,c=a.length-1,d=[],e=a[0],f=a[1],g=d[0]=cA(e,f);while(++b<c)d[b]=g+(g=cA(e=f,f=a[b+1]));d[b]=g;return d}function cA(a,b){return(b[1]-a[1])/(b[0]-a[0])}function cz(a,b,c){a.push("C",cv(cw,b),",",cv(cw,c),",",cv(cx,b),",",cv(cx,c),",",cv(cy,b),",",cv(cy,c))}function cv(a,b){return a[0]*b[0]+a[1]*b[1]+a[2]*b[2]+a[3]*b[3]}function cu(a,b){var c=a.length-1,d=a[0][0],e=a[0][1],f=a[c][0]-d,g=a[c][1]-e,h=-1,i,j;while(++h<=c)i=a[h],j=h/c,i[0]=b*i[0]+(1-b)*(d+j*f),i[1]=b*i[1]+(1-b)*(e+j*g);return cr(a)}function ct(a){var b,c=-1,d=a.length,e=d+4,f,g=[],h=[];while(++c<4)f=a[c%d],g.push(f[0]),h.push(f[1]);b=[cv(cy,g),",",cv(cy,h)],--c;while(++c<e)f=a[c%d],g.shift(),g.push(f[0]),h.shift(),h.push(f[1]),cz(b,g,h);return b.join("")}function cs(a){if(a.length<4)return cj(a);var b=[],c=-1,d=a.length,e,f=[0],g=[0];while(++c<3)e=a[c],f.push(e[0]),g.push(e[1]);b.push(cv(cy,f)+","+cv(cy,g)),--c;while(++c<d)e=a[c],f.shift(),f.push(e[0]),g.shift(),g.push(e[1]),cz(b,f,g);return b.join("")}function cr(a){if(a.length<3)return cj(a);var b=1,c=a.length,d=a[0],e=d[0],f=d[1],g=[e,e,e,(d=a[1])[0]],h=[f,f,f,d[1]],i=[e,",",f];cz(i,g,h);while(++b<c)d=a[b],g.shift(),g.push(d[0]),h.shift(),h.push(d[1]),cz(i,g,h);b=-1;while(++b<2)g.shift(),g.push(d[0]),h.shift(),h.push(d[1]),cz(i,g,h);return i.join("")}function cq(a,b){var c=[],d=(1-b)/2,e,f=a[0],g=a[1],h=1,i=a.length;while(++h<i)e=f,f=g,g=a[h],c.push([d*(g[0]-e[0]),d*(g[1]-e[1])]);return c}function cp(a,b){if(b.length<1||a.length!=b.length&&a.length!=b.length+2)return cj(a);var c=a.length!=b.length,d="",e=a[0],f=a[1],g=b[0],h=g,i=1;c&&(d+="Q"+(f[0]-g[0]*2/3)+","+(f[1]-g[1]*2/3)+","+f[0]+","+f[1],e=a[1],i=2);if(b.length>1){h=b[1],f=a[i],i++,d+="C"+(e[0]+g[0])+","+(e[1]+g[1])+","+(f[0]-h[0])+","+(f[1]-h[1])+","+f[0]+","+f[1];for(var j=2;j<b.length;j++,i++)f=a[i],h=b[j],d+="S"+(f[0]-h[0])+","+(f[1]-h[1])+","+f[0]+","+f[1]}if(c){var k=a[i];d+="Q"+(f[0]+h[0]*2/3)+","+(f[1]+h[1]*2/3)+","+k[0]+","+k[1]}return d}function co(a,b,c){return a.length<3?cj(a):a[0]+cp(a,cq(a,b))}function cn(a,b){return a.length<3?cj(a):a[0]+cp((a.push(a[0]),a),cq([a[a.length-2]].concat(a,[a[1]]),b))}function cm(a,b){return a.length<4?cj(a):a[1]+cp(a.slice(1,a.length-1),cq(a,b))}function cl(a){var b=0,c=a.length,d=a[0],e=[d[0],",",d[1]];while(++b<c)e.push("H",(d=a[b])[0],"V",d[1]);return e.join("")}function ck(a){var b=0,c=a.length,d=a[0],e=[d[0],",",d[1]];while(++b<c)e.push("V",(d=a[b])[1],"H",d[0]);return e.join("")}function cj(a){var b=0,c=a.length,d=a[0],e=[d[0],",",d[1]];while(++b<c)e.push("L",(d=a[b])[0],",",d[1]);return e.join("")}function ch(a){return 
a[1]}function cg(a){return a[0]}function cf(a,b,c,d){var e=[],f=-1,g=b.length,h=typeof c=="function",i=typeof d=="function",j;if(h&&i)while(++f<g)e.push([c.call(a,j=b[f],f),d.call(a,j,f)]);else if(h)while(++f<g)e.push([c.call(a,b[f],f),d]);else if(i)while(++f<g)e.push([c,d.call(a,b[f],f)]);else while(++f<g)e.push([c,d]);return e}function ce(a){function g(d){return d.length<1?null:"M"+e(a(cf(this,d,b,c)),f)}var b=cg,c=ch,d="linear",e=ci[d],f=.7;g.x=function(a){if(!arguments.length)return b;b=a;return g},g.y=function(a){if(!arguments.length)return c;c=a;return g},g.interpolate=function(a){if(!arguments.length)return d;e=ci[d=a];return g},g.tension=function(a){if(!arguments.length)return f;f=a;return g};return g}function cd(a){return a.endAngle}function cc(a){return a.startAngle}function cb(a){return a.outerRadius}function ca(a){return a.innerRadius}function bZ(a,b,c){function g(){d=c.length/(b-a),e=c.length-1;return f}function f(b){return c[Math.max(0,Math.min(e,Math.floor(d*(b-a))))]}var d,e;f.domain=function(c){if(!arguments.length)return[a,b];a=+c[0],b=+c[c.length-1];return g()},f.range=function(a){if(!arguments.length)return c;c=a;return g()},f.copy=function(){return bZ(a,b,c)};return g()}function bY(a,b){function e(a){return isNaN(a=+a)?NaN:b[d3.bisect(c,a)]}function d(){var d=0,f=a.length,g=b.length;c=[];while(++d<g)c[d-1]=d3.quantile(a,d/g);return e}var c;e.domain=function(b){if(!arguments.length)return a;a=b.filter(function(a){return!isNaN(a)}).sort(d3.ascending);return d()},e.range=function(a){if(!arguments.length)return b;b=a;return d()},e.quantiles=function(){return c},e.copy=function(){return bY(a,b)};return d()}function bT(a,b){function f(b){return d[((c[b]||(c[b]=a.push(b)))-1)%d.length]}var c,d,e;f.domain=function(d){if(!arguments.length)return a;a=[],c={};var e=-1,g=d.length,h;while(++e<g)c[h=d[e]]||(c[h]=a.push(h));return f[b.t](b.x,b.p)},f.range=function(a){if(!arguments.length)return d;d=a,e=0,b={t:"range",x:a};return f},f.rangePoints=function(c,g){arguments.length<2&&(g=0);var h=c[0],i=c[1],j=(i-h)/(a.length-1+g);d=a.length<2?[(h+i)/2]:d3.range(h+j*g/2,i+j/2,j),e=0,b={t:"rangePoints",x:c,p:g};return f},f.rangeBands=function(c,g){arguments.length<2&&(g=0);var h=c[0],i=c[1],j=(i-h)/(a.length+g);d=d3.range(h+j*g,i,j),e=j*(1-g),b={t:"rangeBands",x:c,p:g};return f},f.rangeRoundBands=function(c,g){arguments.length<2&&(g=0);var h=c[0],i=c[1],j=Math.floor((i-h)/(a.length+g)),k=i-h-(a.length-g)*j;d=d3.range(h+Math.round(k/2),i,j),e=Math.round(j*(1-g)),b={t:"rangeRoundBands",x:c,p:g};return f},f.rangeBand=function(){return e},f.copy=function(){return bT(a,b)};return f.domain(a)}function bS(a){return function(b){return b<0?-Math.pow(-b,a):Math.pow(b,a)}}function bR(a,b){function e(b){return a(c(b))}var c=bS(b),d=bS(1/b);e.invert=function(b){return d(a.invert(b))},e.domain=function(b){if(!arguments.length)return a.domain().map(d);a.domain(b.map(c));return e},e.ticks=function(a){return bJ(e.domain(),a)},e.tickFormat=function(a){return bK(e.domain(),a)},e.nice=function(){return e.domain(bD(e.domain(),bH))},e.exponent=function(a){if(!arguments.length)return b;var f=e.domain();c=bS(b=a),d=bS(1/b);return e.domain(f)},e.copy=function(){return bR(a.copy(),b)};return bG(e,a)}function bQ(a){return-Math.log(-a)/Math.LN10}function bP(a){return Math.log(a)/Math.LN10}function bN(a,b){function d(c){return a(b(c))}var c=b.pow;d.invert=function(b){return c(a.invert(b))},d.domain=function(e){if(!arguments.length)return a.domain().map(c);b=e[0]<0?bQ:bP,c=b.pow,a.domain(e.map(b));return 
d},d.nice=function(){a.domain(bD(a.domain(),bE));return d},d.ticks=function(){var d=bC(a.domain()),e=[];if(d.every(isFinite)){var f=Math.floor(d[0]),g=Math.ceil(d[1]),h=Math.round(c(d[0])),i=Math.round(c(d[1]));if(b===bQ){e.push(c(f));for(;f++<g;)for(var j=9;j>0;j--)e.push(c(f)*j)}else{for(;f<g;f++)for(var j=1;j<10;j++)e.push(c(f)*j);e.push(c(f))}for(f=0;e[f]<h;f++);for(g=e.length;e[g-1]>i;g--);e=e.slice(f,g)}return e},d.tickFormat=function(a,e){arguments.length<2&&(e=bO);if(arguments.length<1)return e;var f=a/d.ticks().length,g=b===bQ?(h=-1e-15,Math.floor):(h=1e-15,Math.ceil),h;return function(a){return a/c(g(b(a)+h))<f?e(a):""}},d.copy=function(){return bN(a.copy(),b)};return bG(d,a)}function bM(a,b,c,d){var e=[],f=[],g=0,h=a.length;while(++g<h)e.push(c(a[g-1],a[g])),f.push(d(b[g-1],b[g]));return function(b){var c=d3.bisect(a,b,1,a.length-1)-1;return f[c](e[c](b))}}function bL(a,b,c,d){var e=c(a[0],a[1]),f=d(b[0],b[1]);return function(a){return f(e(a))}}function bK(a,b){return d3.format(",."+Math.max(0,-Math.floor(Math.log(bI(a,b)[2])/Math.LN10+.01))+"f")}function bJ(a,b){return d3.range.apply(d3,bI(a,b))}function bI(a,b){var c=bC(a),d=c[1]-c[0],e=Math.pow(10,Math.floor(Math.log(d/b)/Math.LN10)),f=b/d*e;f<=.15?e*=10:f<=.35?e*=5:f<=.75&&(e*=2),c[0]=Math.ceil(c[0]/e)*e,c[1]=Math.floor(c[1]/e)*e+e*.5,c[2]=e;return c}function bH(a){a=Math.pow(10,Math.round(Math.log(a)/Math.LN10)-1);return{floor:function(b){return Math.floor(b/a)*a},ceil:function(b){return Math.ceil(b/a)*a}}}function bG(a,b){a.range=d3.rebind(a,b.range),a.rangeRound=d3.rebind(a,b.rangeRound),a.interpolate=d3.rebind(a,b.interpolate),a.clamp=d3.rebind(a,b.clamp);return a}function bF(a,b,c,d){function h(a){return e(a)}function g(){var g=a.length==2?bL:bM,i=d?P:O;e=g(a,b,i,c),f=g(b,a,i,d3.interpolate);return h}var e,f;h.invert=function(a){return f(a)},h.domain=function(b){if(!arguments.length)return a;a=b.map(Number);return g()},h.range=function(a){if(!arguments.length)return b;b=a;return g()},h.rangeRound=function(a){return h.range(a).interpolate(d3.interpolateRound)},h.clamp=function(a){if(!arguments.length)return d;d=a;return g()},h.interpolate=function(a){if(!arguments.length)return c;c=a;return g()},h.ticks=function(b){return bJ(a,b)},h.tickFormat=function(b){return bK(a,b)},h.nice=function(){bD(a,bH);return g()},h.copy=function(){return bF(a,b,c,d)};return g()}function bE(){return Math}function bD(a,b){var c=0,d=a.length-1,e=a[c],f=a[d],g;f<e&&(g=c,c=d,d=g,g=e,e=f,f=g);if(g=f-e)b=b(g),a[c]=b.floor(e),a[d]=b.ceil(f);return a}function bC(a){var b=a[0],c=a[a.length-1];return b<c?[b,c]:[c,b]}function bB(){}function bz(){var a=null,b=bv,c=Infinity;while(b)b.flush?b=a?a.next=b.next:bv=b.next:(c=Math.min(c,b.then+b.delay),b=(a=b).next);return c}function by(){var a,b=Date.now(),c=bv;while(c)a=b-c.then,a>=c.delay&&(c.flush=c.callback(a)),c=c.next;var d=bz()-b;d>24?(isFinite(d)&&(clearTimeout(bx),bx=setTimeout(by,d)),bw=0):(bw=1,bA(by))}function bu(a){for(var b=0,c=this.length;b<c;b++)for(var d=this[b],e=0,f=d.length;e<f;e++){var g=d[e];g&&a.call(g=g.node,g.__data__,e,b)}return this}function bp(a){function c(b,c,d){return d!=a&&d3.interpolate(d,a)}function b(b,c,d){var e=a.call(this,b,c);return e==null?d!=""&&bn:d!=e&&d3.interpolate(d,e)}return typeof a=="function"?b:a==null?bo:(a+="",c)}function bo(a,b,c){return c!=""&&bn}function bm(a,b){h(a,bq);var c={},d=d3.dispatch("start","end"),e=bt,f=Date.now();a.id=b,a.tween=function(b,d){if(arguments.length<2)return c[b];d==null?delete c[b]:c[b]=d;return 
a},a.ease=function(b){if(!arguments.length)return e;e=typeof b=="function"?b:d3.ease.apply(d3,arguments);return a},a.each=function(b,c){if(arguments.length<2)return bu.call(a,b);d[b].add(c);return a},d3.timer(function(g){a.each(function(h,i,j){function r(){--o.count||delete l.__transition__;return 1}function q(a){if(o.active!==b)return r();var c=(a-m)/n,f=e(c),g=k.length;while(g>0)k[--g].call(l,f);if(c>=1){r(),bs=b,d.end.dispatch.call(l,h,i),bs=0;return 1}}function p(a){if(o.active>b)return r();o.active=b;for(var e in c)(e=c[e].call(l,h,i))&&k.push(e);d.start.dispatch.call(l,h,i),q(a)||d3.timer(q,0,f);return 1}var k=[],l=this,m=a[j][i].delay,n=a[j][i].duration,o=l.__transition__||(l.__transition__={active:0,count:0});++o.count,m<=g?p(g):d3.timer(p,m,f)});return 1},0,f);return a}function bk(a){arguments.length||(a=d3.ascending);return function(b,c){return a(b&&b.__data__,c&&c.__data__)}}function bi(a){h(a,bj);return a}function bh(a){return{__data__:a}}function bg(a,b){function h(){(b.apply(this,arguments)?f:g).call(this)}function g(){if(b=this.classList)return b.remove(a);var b=this.className,d=b.baseVal!=null,e=d?b.baseVal:b;e=m(e.replace(c," ")),d?b.baseVal=e:this.className=e}function f(){if(b=this.classList)return b.add(a);var b=this.className,d=b.baseVal!=null,e=d?b.baseVal:b;c.lastIndex=0,c.test(e)||(e=m(e+" "+a),d?b.baseVal=e:this.className=e)}var c=new RegExp("(^|\\s+)"+d3.requote(a)+"(\\s+|$)","g");if(arguments.length<2){var d=this.node();if(e=d.classList)return e.contains(a);var e=d.className;c.lastIndex=0;return c.test(e.baseVal!=null?e.baseVal:e)}return this.each(typeof b=="function"?h:b?f:g)}function be(a){return function(){return bb(a,this)}}function bd(a){return function(){return ba(a,this)}}function _(a){h(a,bc);return a}function $(a,b,c){function g(a){return Math.round(f(a)*255)}function f(a){a>360?a-=360:a<0&&(a+=360);return a<60?d+(e-d)*a/60:a<180?e:a<240?d+(e-d)*(240-a)/60:d}var d,e;a=a%360,a<0&&(a+=360),b=b<0?0:b>1?1:b,c=c<0?0:c>1?1:c,e=c<=.5?c*(1+b):c+b-c*b,d=2*c-e;return Q(g(a+120),g(a),g(a-120))}function Z(a,b,c){this.h=a,this.s=b,this.l=c}function Y(a,b,c){return new Z(a,b,c)}function V(a){var b=parseFloat(a);return a.charAt(a.length-1)==="%"?Math.round(b*2.55):b}function U(a,b,c){var d=Math.min(a/=255,b/=255,c/=255),e=Math.max(a,b,c),f=e-d,g,h,i=(e+d)/2;f?(h=i<.5?f/(e+d):f/(2-e-d),a==e?g=(b-c)/f+(b<c?6:0):b==e?g=(c-a)/f+2:g=(a-b)/f+4,g*=60):h=g=0;return Y(g,h,i)}function T(a,b,c){var d=0,e=0,f=0,g,h,i;g=/([a-z]+)\((.*)\)/i.exec(a);if(g){h=g[2].split(",");switch(g[1]){case"hsl":return c(parseFloat(h[0]),parseFloat(h[1])/100,parseFloat(h[2])/100);case"rgb":return b(V(h[0]),V(h[1]),V(h[2]))}}if(i=W[a])return b(i.r,i.g,i.b);a!=null&&a.charAt(0)==="#"&&(a.length===4?(d=a.charAt(1),d+=d,e=a.charAt(2),e+=e,f=a.charAt(3),f+=f):a.length===7&&(d=a.substring(1,3),e=a.substring(3,5),f=a.substring(5,7)),d=parseInt(d,16),e=parseInt(e,16),f=parseInt(f,16));return b(d,e,f)}function S(a){return a<16?"0"+Math.max(0,a).toString(16):Math.min(255,a).toString(16)}function R(a,b,c){this.r=a,this.g=b,this.b=c}function Q(a,b,c){return new R(a,b,c)}function P(a,b){b=b-(a=+a)?1/(b-a):0;return function(c){return Math.max(0,Math.min(1,(c-a)*b))}}function O(a,b){b=b-(a=+a)?1/(b-a):0;return function(c){return(c-a)*b}}function N(a){return a in M||/\bcolor\b/.test(a)?d3.interpolateRgb:d3.interpolate}function K(a){return a<1/2.75?7.5625*a*a:a<2/2.75?7.5625*(a-=1.5/2.75)*a+.75:a<2.5/2.75?7.5625*(a-=2.25/2.75)*a+.9375:7.5625*(a-=2.625/2.75)*a+.984375}function J(a){a||(a=1.70158);return 
function(b){return b*b*((a+1)*b-a)}}function I(a,b){var c;arguments.length<2&&(b=.45),arguments.length<1?(a=1,c=b/4):c=b/(2*Math.PI)*Math.asin(1/a);return function(d){return 1+a*Math.pow(2,10*-d)*Math.sin((d-c)*2*Math.PI/b)}}function H(a){return 1-Math.sqrt(1-a*a)}function G(a){return Math.pow(2,10*(a-1))}function F(a){return 1-Math.cos(a*Math.PI/2)}function E(a){return function(b){return Math.pow(b,a)}}function D(a){return a}function C(a){return function(b){return.5*(b<.5?a(2*b):2-a(2-2*b))}}function B(a){return function(b){return 1-a(1-b)}}function A(a){return function(b){return b<=0?0:b>=1?1:a(b)}}function v(a,b){return{scale:Math.pow(10,(8-b)*3),symbol:a}}function t(a){var b=a.lastIndexOf("."),c=b>=0?a.substring(b):(b=a.length,""),d=[];while(b>0)d.push(a.substring(b-=3,b+3));return d.reverse().join(",")+c}function s(a){return a+""}function r(a,b){return b-(a?1+Math.floor(Math.log(a+Math.pow(10,1+Math.floor(Math.log(a)/Math.LN10)-b))/Math.LN10):1)}function o(a){var b={},c=[];b.add=function(a){for(var d=0;d<c.length;d++)if(c[d].listener==a)return b;c.push({listener:a,on:!0});return b},b.remove=function(a){for(var d=0;d<c.length;d++){var e=c[d];if(e.listener==a){e.on=!1,c=c.slice(0,d).concat(c.slice(d+1));break}}return b},b.dispatch=function(){var a=c;for(var b=0,d=a.length;b<d;b++){var e=a[b];e.on&&e.listener.apply(this,arguments)}};return b}function m(a){return a.replace(/(^\s+)|(\s+$)/g,"").replace(/\s+/g," ")}function l(a){return a==null}function k(a){return a.length}function j(a){return a!=null&&!isNaN(a)}function i(){return this}function f(a){return Array.prototype.slice.call(a)}function e(a){var b=-1,c=a.length,d=[];while(++b<c)d.push(a[b]);return d}Date.now||(Date.now=function(){return+(new Date)});try{document.createElement("div").style.setProperty("opacity",0,"")}catch(a){var b=CSSStyleDeclaration.prototype,c=b.setProperty;b.setProperty=function(a,b,d){c.call(this,a,b+"",d)}}d3={version:"2.4.3"};var d=f;try{d(document.documentElement.childNodes)[0].nodeType}catch(g){d=e}var h=[].__proto__?function(a,b){a.__proto__=b}:function(a,b){for(var c in b)a[c]=b[c]};d3.functor=function(a){return typeof a=="function"?a:function(){return a}},d3.rebind=function(a,b){return function(){var c=b.apply(a,arguments);return arguments.length?a:c}},d3.ascending=function(a,b){return a<b?-1:a>b?1:a>=b?0:NaN},d3.descending=function(a,b){return b<a?-1:b>a?1:b>=a?0:NaN},d3.mean=function(a,b){var c=a.length,d,e=0,f=-1,g=0;if(arguments.length===1)while(++f<c)j(d=a[f])&&(e+=(d-e)/++g);else while(++f<c)j(d=b.call(a,a[f],f))&&(e+=(d-e)/++g);return g?e:undefined},d3.median=function(a,b){arguments.length>1&&(a=a.map(b)),a=a.filter(j);return a.length?d3.quantile(a.sort(d3.ascending),.5):undefined},d3.min=function(a,b){var c=-1,d=a.length,e,f;if(arguments.length===1){while(++c<d&&((e=a[c])==null||e!=e))e=undefined;while(++c<d)(f=a[c])!=null&&e>f&&(e=f)}else{while(++c<d&&((e=b.call(a,a[c],c))==null||e!=e))e=undefined;while(++c<d)(f=b.call(a,a[c],c))!=null&&e>f&&(e=f)}return e},d3.max=function(a,b){var c=-1,d=a.length,e,f;if(arguments.length===1){while(++c<d&&((e=a[c])==null||e!=e))e=undefined;while(++c<d)(f=a[c])!=null&&f>e&&(e=f)}else{while(++c<d&&((e=b.call(a,a[c],c))==null||e!=e))e=undefined;while(++c<d)(f=b.call(a,a[c],c))!=null&&f>e&&(e=f)}return e},d3.sum=function(a,b){var c=0,d=a.length,e,f=-1;if(arguments.length===1)while(++f<d)isNaN(e=+a[f])||(c+=e);else while(++f<d)isNaN(e=+b.call(a,a[f],f))||(c+=e);return c},d3.quantile=function(a,b){var c=(a.length-1)*b+1,d=Math.floor(c),e=a[d-1],f=c-d;return 
f?e+f*(a[d]-e):e},d3.zip=function(){if(!(e=arguments.length))return[];for(var a=-1,b=d3.min(arguments,k),c=Array(b);++a<b;)for(var d=-1,e,f=c[a]=Array(e);++d<e;)f[d]=arguments[d][a];return c},d3.bisectLeft=function(a,b,c,d){arguments.length<3&&(c=0),arguments.length<4&&(d=a.length);while(c<d){var e=c+d>>1;a[e]<b?c=e+1:d=e}return c},d3.bisect=d3.bisectRight=function(a,b,c,d){arguments.length<3&&(c=0),arguments.length<4&&(d=a.length);while(c<d){var e=c+d>>1;b<a[e]?d=e:c=e+1}return c},d3.first=function(a,b){var c=0,d=a.length,e=a[0],f;arguments.length===1&&(b=d3.ascending);while(++c<d)b.call(a,e,f=a[c])>0&&(e=f);return e},d3.last=function(a,b){var c=0,d=a.length,e=a[0],f;arguments.length===1&&(b=d3.ascending);while(++c<d)b.call(a,e,f=a[c])<=0&&(e=f);return e},d3.nest=function(){function g(a,d){if(d>=b.length)return a;var e=[],f=c[d++],h;for(h in a)e.push({key:h,values:g(a[h],d)});f&&e.sort(function(a,b){return f(a.key,b.key)});return e}function f(c,g){if(g>=b.length)return e?e.call(a,c):d?c.sort(d):c;var h=-1,i=c.length,j=b[g++],k,l,m={};while(++h<i)(k=j(l=c[h]))in m?m[k].push(l):m[k]=[l];for(k in m)m[k]=f(m[k],g);return m}var a={},b=[],c=[],d,e;a.map=function(a){return f(a,0)},a.entries=function(a){return g(f(a,0),0)},a.key=function(c){b.push(c);return a},a.sortKeys=function(d){c[b.length-1]=d;return a},a.sortValues=function(b){d=b;return a},a.rollup=function(b){e=b;return a};return a},d3.keys=function(a){var b=[];for(var c in a)b.push(c);return b},d3.values=function(a){var b=[];for(var c in a)b.push(a[c]);return b},d3.entries=function(a){var b=[];for(var c in a)b.push({key:c,value:a[c]});return b},d3.permute=function(a,b){var c=[],d=-1,e=b.length;while(++d<e)c[d]=a[b[d]];return c},d3.merge=function(a){return Array.prototype.concat.apply([],a)},d3.split=function(a,b){var c=[],d=[],e,f=-1,g=a.length;arguments.length<2&&(b=l);while(++f<g)b.call(d,e=a[f],f)?d=[]:(d.length||c.push(d),d.push(e));return c},d3.range=function(a,b,c){arguments.length<3&&(c=1,arguments.length<2&&(b=a,a=0));if((b-a)/c==Infinity)throw new Error("infinite range");var d=[],e=-1,f;if(c<0)while((f=a+c*++e)>b)d.push(f);else while((f=a+c*++e)<b)d.push(f);return d},d3.requote=function(a){return a.replace(n,"\\$&")};var n=/[\\\^\$\*\+\?\|\[\]\(\)\.\{\}]/g;d3.round=function(a,b){return b?Math.round(a*Math.pow(10,b))*Math.pow(10,-b):Math.round(a)},d3.xhr=function(a,b,c){var d=new XMLHttpRequest;arguments.length<3?c=b:b&&d.overrideMimeType&&d.overrideMimeType(b),d.open("GET",a,!0),d.onreadystatechange=function(){d.readyState===4&&c(d.status<300?d:null)},d.send(null)},d3.text=function(a,b,c){function d(a){c(a&&a.responseText)}arguments.length<3&&(c=b,b=null),d3.xhr(a,b,d)},d3.json=function(a,b){d3.text(a,"application/json",function(a){b(a?JSON.parse(a):null)})},d3.html=function(a,b){d3.text(a,"text/html",function(a){if(a!=null){var c=document.createRange();c.selectNode(document.body),a=c.createContextualFragment(a)}b(a)})},d3.xml=function(a,b,c){function d(a){c(a&&a.responseXML)}arguments.length<3&&(c=b,b=null),d3.xhr(a,b,d)},d3.ns={prefix:{svg:"http://www.w3.org/2000/svg",xhtml:"http://www.w3.org/1999/xhtml",xlink:"http://www.w3.org/1999/xlink",xml:"http://www.w3.org/XML/1998/namespace",xmlns:"http://www.w3.org/2000/xmlns/"},qualify:function(a){var b=a.indexOf(":");return b<0?a:{space:d3.ns.prefix[a.substring(0,b)],local:a.substring(b+1)}}},d3.dispatch=function(a){var b={},c;for(var d=0,e=arguments.length;d<e;d++)c=arguments[d],b[c]=o(c);return b},d3.format=function(a){var b=p.exec(a),c=b[1]||" 
",d=b[3]||"",e=b[5],f=+b[6],g=b[7],h=b[8],i=b[9],j=1,k="",l=!1;h&&(h=+h.substring(1)),e&&(c="0",g&&(f-=Math.floor((f-1)/4)));switch(i){case"n":g=!0,i="g";break;case"%":j=100,k="%",i="f";break;case"p":j=100,k="%",i="r";break;case"d":l=!0,h=0;break;case"s":j=-1,i="r"}i=="r"&&!h&&(i="g"),i=q[i]||s;return function(a){if(l&&a%1)return"";var b=a<0&&(a=-a)?"−":d;if(j<0){var m=d3.formatPrefix(a,h);a*=m.scale,k=m.symbol}else a*=j;a=i(a,h);if(e){var n=a.length+b.length;n<f&&(a=Array(f-n+1).join(c)+a),g&&(a=t(a)),a=b+a}else{g&&(a=t(a)),a=b+a;var n=a.length;n<f&&(a=Array(f-n+1).join(c)+a)}return a+k}};var p=/(?:([^{])?([<>=^]))?([+\- ])?(#)?(0)?([0-9]+)?(,)?(\.[0-9]+)?([a-zA-Z%])?/,q={g:function(a,b){return a.toPrecision(b)},e:function(a,b){return a.toExponential(b)},f:function(a,b){return a.toFixed(b)},r:function(a,b){return d3.round(a,b=r(a,b)).toFixed(Math.max(0,Math.min(20,b)))}},u=["y","z","a","f","p","n","μ","m","","k","M","G","T","P","E","Z","Y"].map(v);d3.formatPrefix=function(a,b){var c=0;a&&(a<0&&(a*=-1),b&&(a=d3.round(a,r(a,b))),c=1+Math.floor(1e-12+Math.log(a)/Math.LN10),c=Math.max(-24,Math.min(24,Math.floor((c<=0?c+1:c-1)/3)*3)));return u[8+c/3]};var w=E(2),x=E(3),y={linear:function(){return D},poly:E,quad:function(){return w},cubic:function(){return x},sin:function(){return F},exp:function(){return G},circle:function(){return H},elastic:I,back:J,bounce:function(){return K}},z={"in":function(a){return a},out:B,"in-out":C,"out-in":function(a){return C(B(a))}};d3.ease=function(a){var b=a.indexOf("-"),c=b>=0?a.substring(0,b):a,d=b>=0?a.substring(b+1):"in";return A(z[d](y[c].apply(null,Array.prototype.slice.call(arguments,1))))},d3.event=null,d3.interpolate=function(a,b){var c=d3.interpolators.length,d;while(--c>=0&&!(d=d3.interpolators[c](a,b)));return d},d3.interpolateNumber=function(a,b){b-=a;return function(c){return a+b*c}},d3.interpolateRound=function(a,b){b-=a;return function(c){return Math.round(a+b*c)}},d3.interpolateString=function(a,b){var c,d,e,f=0,g=0,h=[],i=[],j,k;L.lastIndex=0;for(d=0;c=L.exec(b);++d)c.index&&h.push(b.substring(f,g=c.index)),i.push({i:h.length,x:c[0]}),h.push(null),f=L.lastIndex;f<b.length&&h.push(b.substring(f));for(d=0,j=i.length;(c=L.exec(a))&&d<j;++d){k=i[d];if(k.x==c[0]){if(k.i)if(h[k.i+1]==null){h[k.i-1]+=k.x,h.splice(k.i,1);for(e=d+1;e<j;++e)i[e].i--}else{h[k.i-1]+=k.x+h[k.i+1],h.splice(k.i,2);for(e=d+1;e<j;++e)i[e].i-=2}else if(h[k.i+1]==null)h[k.i]=k.x;else{h[k.i]=k.x+h[k.i+1],h.splice(k.i+1,1);for(e=d+1;e<j;++e)i[e].i--}i.splice(d,1),j--,d--}else k.x=d3.interpolateNumber(parseFloat(c[0]),parseFloat(k.x))}while(d<j)k=i.pop(),h[k.i+1]==null?h[k.i]=k.x:(h[k.i]=k.x+h[k.i+1],h.splice(k.i+1,1)),j--;return h.length===1?h[0]==null?i[0].x:function(){return b}:function(a){for(d=0;d<j;++d)h[(k=i[d]).i]=k.x(a);return h.join("")}},d3.interpolateRgb=function(a,b){a=d3.rgb(a),b=d3.rgb(b);var c=a.r,d=a.g,e=a.b,f=b.r-c,g=b.g-d,h=b.b-e;return function(a){return"#"+S(Math.round(c+f*a))+S(Math.round(d+g*a))+S(Math.round(e+h*a))}},d3.interpolateHsl=function(a,b){a=d3.hsl(a),b=d3.hsl(b);var c=a.h,d=a.s,e=a.l,f=b.h-c,g=b.s-d,h=b.l-e;return function(a){return $(c+f*a,d+g*a,e+h*a).toString()}},d3.interpolateArray=function(a,b){var c=[],d=[],e=a.length,f=b.length,g=Math.min(a.length,b.length),h;for(h=0;h<g;++h)c.push(d3.interpolate(a[h],b[h]));for(;h<e;++h)d[h]=a[h];for(;h<f;++h)d[h]=b[h];return function(a){for(h=0;h<g;++h)d[h]=c[h](a);return d}},d3.interpolateObject=function(a,b){var c={},d={},e;for(e in a)e in b?c[e]=N(e)(a[e],b[e]):d[e]=a[e];for(e in b)e in 
a||(d[e]=b[e]);return function(a){for(e in c)d[e]=c[e](a);return d}};var L=/[-+]?(?:\d+\.\d+|\d+\.|\.\d+|\d+)(?:[eE][-]?\d+)?/g,M={background:1,fill:1,stroke:1};d3.interpolators=[d3.interpolateObject,function(a,b){return b instanceof Array&&d3.interpolateArray(a,b)},function(a,b){return typeof b=="string"&&d3.interpolateString(String(a),b)},function(a,b){return(typeof b=="string"?b in W||/^(#|rgb\(|hsl\()/.test(b):b instanceof R||b instanceof Z)&&d3.interpolateRgb(String(a),b)},function(a,b){return typeof b=="number"&&d3.interpolateNumber(+a,b)}],d3.rgb=function(a,b,c){return arguments.length===1?a instanceof R?Q(a.r,a.g,a.b):T(""+a,Q,$):Q(~~a,~~b,~~c)},R.prototype.brighter=function(a){a=Math.pow(.7,arguments.length?a:1);var b=this.r,c=this.g,d=this.b,e=30;if(!b&&!c&&!d)return Q(e,e,e);b&&b<e&&(b=e),c&&c<e&&(c=e),d&&d<e&&(d=e);return Q(Math.min(255,Math.floor(b/a)),Math.min(255,Math.floor(c/a)),Math.min(255,Math.floor(d/a)))},R.prototype.darker=function(a){a=Math.pow(.7,arguments.length?a:1);return Q(Math.floor(a*this.r),Math.floor(a*this.g),Math.floor(a*this.b))},R.prototype.hsl=function(){return U(this.r,this.g,this.b)},R.prototype.toString=function(){return"#"+S(this.r)+S(this.g)+S(this.b)};var W={aliceblue:"#f0f8ff",antiquewhite:"#faebd7",aqua:"#00ffff",aquamarine:"#7fffd4",azure:"#f0ffff",beige:"#f5f5dc",bisque:"#ffe4c4",black:"#000000",blanchedalmond:"#ffebcd",blue:"#0000ff",blueviolet:"#8a2be2",brown:"#a52a2a",burlywood:"#deb887",cadetblue:"#5f9ea0",chartreuse:"#7fff00",chocolate:"#d2691e",coral:"#ff7f50",cornflowerblue:"#6495ed",cornsilk:"#fff8dc",crimson:"#dc143c",cyan:"#00ffff",darkblue:"#00008b",darkcyan:"#008b8b",darkgoldenrod:"#b8860b",darkgray:"#a9a9a9",darkgreen:"#006400",darkgrey:"#a9a9a9",darkkhaki:"#bdb76b",darkmagenta:"#8b008b",darkolivegreen:"#556b2f",darkorange:"#ff8c00",darkorchid:"#9932cc",darkred:"#8b0000",darksalmon:"#e9967a",darkseagreen:"#8fbc8f",darkslateblue:"#483d8b",darkslategray:"#2f4f4f",darkslategrey:"#2f4f4f",darkturquoise:"#00ced1",darkviolet:"#9400d3",deeppink:"#ff1493",deepskyblue:"#00bfff",dimgray:"#696969",dimgrey:"#696969",dodgerblue:"#1e90ff",firebrick:"#b22222",floralwhite:"#fffaf0",forestgreen:"#228b22",fuchsia:"#ff00ff",gainsboro:"#dcdcdc",ghostwhite:"#f8f8ff",gold:"#ffd700",goldenrod:"#daa520",gray:"#808080",green:"#008000",greenyellow:"#adff2f",grey:"#808080",honeydew:"#f0fff0",hotpink:"#ff69b4",indianred:"#cd5c5c",indigo:"#4b0082",ivory:"#fffff0",khaki:"#f0e68c",lavender:"#e6e6fa",lavenderblush:"#fff0f5",lawngreen:"#7cfc00",lemonchiffon:"#fffacd",lightblue:"#add8e6",lightcoral:"#f08080",lightcyan:"#e0ffff",lightgoldenrodyellow:"#fafad2",lightgray:"#d3d3d3",lightgreen:"#90ee90",lightgrey:"#d3d3d3",lightpink:"#ffb6c1",lightsalmon:"#ffa07a",lightseagreen:"#20b2aa",lightskyblue:"#87cefa",lightslategray:"#778899",lightslategrey:"#778899",lightsteelblue:"#b0c4de",lightyellow:"#ffffe0",lime:"#00ff00",limegreen:"#32cd32",linen:"#faf0e6",magenta:"#ff00ff",maroon:"#800000",mediumaquamarine:"#66cdaa",mediumblue:"#0000cd",mediumorchid:"#ba55d3",mediumpurple:"#9370db",mediumseagreen:"#3cb371",mediumslateblue:"#7b68ee",mediumspringgreen:"#00fa9a",mediumturquoise:"#48d1cc",mediumvioletred:"#c71585",midnightblue:"#191970",mintcream:"#f5fffa",mistyrose:"#ffe4e1",moccasin:"#ffe4b5",navajowhite:"#ffdead",navy:"#000080",oldlace:"#fdf5e6",olive:"#808000",olivedrab:"#6b8e23",orange:"#ffa500",orangered:"#ff4500",orchid:"#da70d6",palegoldenrod:"#eee8aa",palegreen:"#98fb98",paleturquoise:"#afeeee",palevioletred:"#db7093",papayawhip:"#ffefd5",peachpuff:"#ffdab9",peru:"
#cd853f",pink:"#ffc0cb",plum:"#dda0dd",powderblue:"#b0e0e6",purple:"#800080",red:"#ff0000",rosybrown:"#bc8f8f",royalblue:"#4169e1",saddlebrown:"#8b4513",salmon:"#fa8072",sandybrown:"#f4a460",seagreen:"#2e8b57",seashell:"#fff5ee",sienna:"#a0522d",silver:"#c0c0c0",skyblue:"#87ceeb",slateblue:"#6a5acd",slategray:"#708090",slategrey:"#708090",snow:"#fffafa",springgreen:"#00ff7f",steelblue:"#4682b4",tan:"#d2b48c",teal:"#008080",thistle:"#d8bfd8",tomato:"#ff6347",turquoise:"#40e0d0",violet:"#ee82ee",wheat:"#f5deb3",white:"#ffffff",whitesmoke:"#f5f5f5",yellow:"#ffff00",yellowgreen:"#9acd32"};for(var X in W)W[X]=T(W[X],Q,$);d3.hsl=function(a,b,c){return arguments.length===1?a instanceof Z?Y(a.h,a.s,a.l):T(""+a,U,Y):Y(+a,+b,+c)},Z.prototype.brighter=function(a){a=Math.pow(.7,arguments.length?a:1);return Y(this.h,this.s,this.l/a)},Z.prototype.darker=function(a){a=Math.pow(.7,arguments.length?a:1);return Y(this.h,this.s
+,a*this.l)},Z.prototype.rgb=function(){return $(this.h,this.s,this.l)},Z.prototype.toString=function(){return this.rgb().toString()};var ba=function(a,b){return b.querySelector(a)},bb=function(a,b){return b.querySelectorAll(a)};typeof Sizzle=="function"&&(ba=function(a,b){return Sizzle(a,b)[0]},bb=function(a,b){return Sizzle.uniqueSort(Sizzle(a,b))});var bc=[];d3.selection=function(){return bl},d3.selection.prototype=bc,bc.select=function(a){var b=[],c,d,e,f;typeof a!="function"&&(a=bd(a));for(var g=-1,h=this.length;++g<h;){b.push(c=[]),c.parentNode=(e=this[g]).parentNode;for(var i=-1,j=e.length;++i<j;)(f=e[i])?(c.push(d=a.call(f,f.__data__,i)),d&&"__data__"in f&&(d.__data__=f.__data__)):c.push(null)}return _(b)},bc.selectAll=function(a){var b=[],c,e;typeof a!="function"&&(a=be(a));for(var f=-1,g=this.length;++f<g;)for(var h=this[f],i=-1,j=h.length;++i<j;)if(e=h[i])b.push(c=d(a.call(e,e.__data__,i))),c.parentNode=e;return _(b)},bc.attr=function(a,b){function i(){var c=b.apply(this,arguments);c==null?this.removeAttributeNS(a.space,a.local):this.setAttributeNS(a.space,a.local,c)}function h(){var c=b.apply(this,arguments);c==null?this.removeAttribute(a):this.setAttribute(a,c)}function g(){this.setAttributeNS(a.space,a.local,b)}function f(){this.setAttribute(a,b)}function e(){this.removeAttributeNS(a.space,a.local)}function d(){this.removeAttribute(a)}a=d3.ns.qualify(a);if(arguments.length<2){var c=this.node();return a.local?c.getAttributeNS(a.space,a.local):c.getAttribute(a)}return this.each(b==null?a.local?e:d:typeof b=="function"?a.local?i:h:a.local?g:f)},bc.classed=function(a,b){var c=a.split(bf),d=c.length,e=-1;if(arguments.length>1){while(++e<d)bg.call(this,c[e],b);return this}while(++e<d)if(!bg.call(this,c[e]))return!1;return!0};var bf=/\s+/g;bc.style=function(a,b,c){function f(){var d=b.apply(this,arguments);d==null?this.style.removeProperty(a):this.style.setProperty(a,d,c)}function e(){this.style.setProperty(a,b,c)}function d(){this.style.removeProperty(a)}arguments.length<3&&(c="");return arguments.length<2?window.getComputedStyle(this.node(),null).getPropertyValue(a):this.each(b==null?d:typeof b=="function"?f:e)},bc.property=function(a,b){function e(){var c=b.apply(this,arguments);c==null?delete this[a]:this[a]=c}function d(){this[a]=b}function c(){delete this[a]}return arguments.length<2?this.node()[a]:this.each(b==null?c:typeof b=="function"?e:d)},bc.text=function(a){return arguments.length<1?this.node().textContent:this.each(typeof a=="function"?function(){this.textContent=a.apply(this,arguments)}:function(){this.textContent=a})},bc.html=function(a){return arguments.length<1?this.node().innerHTML:this.each(typeof a=="function"?function(){this.innerHTML=a.apply(this,arguments)}:function(){this.innerHTML=a})},bc.append=function(a){function c(){return this.appendChild(document.createElementNS(a.space,a.local))}function b(){return this.appendChild(document.createElement(a))}a=d3.ns.qualify(a);return this.select(a.local?c:b)},bc.insert=function(a,b){function d(){return this.insertBefore(document.createElementNS(a.space,a.local),ba(b,this))}function c(){return this.insertBefore(document.createElement(a),ba(b,this))}a=d3.ns.qualify(a);return this.select(a.local?d:c)},bc.remove=function(){return this.each(function(){var a=this.parentNode;a&&a.removeChild(this)})},bc.data=function(a,b){function f(a,f){var g,h=a.length,i=f.length,j=Math.min(h,i),k=Math.max(h,i),l=[],m=[],n=[],o,p;if(b){var q={},r=[],s,t=f.length;for(g=-1;++g<h;)s=b.call(o=a[g],o.__data__,g),s in 
q?n[t++]=o:q[s]=o,r.push(s);for(g=-1;++g<i;)o=q[s=b.call(f,p=f[g],g)],o?(o.__data__=p,l[g]=o,m[g]=n[g]=null):(m[g]=bh(p),l[g]=n[g]=null),delete q[s];for(g=-1;++g<h;)r[g]in q&&(n[g]=a[g])}else{for(g=-1;++g<j;)o=a[g],p=f[g],o?(o.__data__=p,l[g]=o,m[g]=n[g]=null):(m[g]=bh(p),l[g]=n[g]=null);for(;g<i;++g)m[g]=bh(f[g]),l[g]=n[g]=null;for(;g<k;++g)n[g]=a[g],m[g]=l[g]=null}m.update=l,m.parentNode=l.parentNode=n.parentNode=a.parentNode,c.push(m),d.push(l),e.push(n)}var c=[],d=[],e=[],g=-1,h=this.length,i;if(typeof a=="function")while(++g<h)f(i=this[g],a.call(i,i.parentNode.__data__,g));else while(++g<h)f(i=this[g],a);var j=_(d);j.enter=function(){return bi(c)},j.exit=function(){return _(e)};return j};var bj=[];bj.append=bc.append,bj.insert=bc.insert,bj.empty=bc.empty,bj.select=function(a){var b=[],c,d,e,f,g;for(var h=-1,i=this.length;++h<i;){e=(f=this[h]).update,b.push(c=[]),c.parentNode=f.parentNode;for(var j=-1,k=f.length;++j<k;)(g=f[j])?(c.push(e[j]=d=a.call(f.parentNode,g.__data__,j)),d.__data__=g.__data__):c.push(null)}return _(b)},bc.filter=function(a){var b=[],c,d,e;for(var f=0,g=this.length;f<g;f++){b.push(c=[]),c.parentNode=(d=this[f]).parentNode;for(var h=0,i=d.length;h<i;h++)(e=d[h])&&a.call(e,e.__data__,h)&&c.push(e)}return _(b)},bc.map=function(a){return this.each(function(){this.__data__=a.apply(this,arguments)})},bc.sort=function(a){a=bk.apply(this,arguments);for(var b=0,c=this.length;b<c;b++)for(var d=this[b].sort(a),e=1,f=d.length,g=d[0];e<f;e++){var h=d[e];h&&(g&&g.parentNode.insertBefore(h,g.nextSibling),g=h)}return this},bc.on=function(a,b,c){arguments.length<3&&(c=!1);var d="__on"+a,e=a.indexOf(".");e>0&&(a=a.substring(0,e));return arguments.length<2?(e=this.node()[d])&&e._:this.each(function(e,f){function h(a){var c=d3.event;d3.event=a;try{b.call(g,g.__data__,f)}finally{d3.event=c}}var g=this;g[d]&&g.removeEventListener(a,g[d],c),b&&g.addEventListener(a,g[d]=h,c),h._=b})},bc.each=function(a){for(var b=-1,c=this.length;++b<c;)for(var d=this[b],e=-1,f=d.length;++e<f;){var g=d[e];g&&a.call(g,g.__data__,e,b)}return this},bc.call=function(a){a.apply(this,(arguments[0]=this,arguments));return this},bc.empty=function(){return!this.node()},bc.node=function(a){for(var b=0,c=this.length;b<c;b++)for(var d=this[b],e=0,f=d.length;e<f;e++){var g=d[e];if(g)return g}return null},bc.transition=function(){var a=[],b,c;for(var d=-1,e=this.length;++d<e;){a.push(b=[]);for(var f=this[d],g=-1,h=f.length;++g<h;)b.push((c=f[g])?{node:c,delay:0,duration:250}:null)}return bm(a,bs||++br)};var bl=_([[document]]);bl[0].parentNode=document.documentElement,d3.select=function(a){return typeof a=="string"?bl.select(a):_([[a]])},d3.selectAll=function(a){return typeof a=="string"?bl.selectAll(a):_([d(a)])};var bn={},bq=[],br=0,bs=0,bt=d3.ease("cubic-in-out");bq.call=bc.call,d3.transition=function(){return bl.transition()},d3.transition.prototype=bq,bq.select=function(a){var b=[],c,d,e;typeof a!="function"&&(a=bd(a));for(var f=-1,g=this.length;++f<g;){b.push(c=[]);for(var h=this[f],i=-1,j=h.length;++i<j;)(e=h[i])&&(d=a.call(e.node,e.node.__data__,i))?("__data__"in e.node&&(d.__data__=e.node.__data__),c.push({node:d,delay:e.delay,duration:e.duration})):c.push(null)}return bm(b,this.id).ease(this.ease())},bq.selectAll=function(a){var b=[],c,d,e;typeof a!="function"&&(a=be(a));for(var f=-1,g=this.length;++f<g;)for(var h=this[f],i=-1,j=h.length;++i<j;)if(e=h[i]){d=a.call(e.node,e.node.__data__,i),b.push(c=[]);for(var k=-1,l=d.length;++k<l;)c.push({node:d[k],delay:e.delay,duration:e.duration})}return 
bm(b,this.id).ease(this.ease())},bq.attr=function(a,b){return this.attrTween(a,bp(b))},bq.attrTween=function(a,b){function e(a,d){var e=b.call(this,a,d,this.getAttributeNS(c.space,c.local));return e===bn?(this.removeAttributeNS(c.space,c.local),null):e&&function(a){this.setAttributeNS(c.space,c.local,e(a))}}function d(a,d){var e=b.call(this,a,d,this.getAttribute(c));return e===bn?(this.removeAttribute(c),null):e&&function(a){this.setAttribute(c,e(a))}}var c=d3.ns.qualify(a);return this.tween("attr."+a,c.local?e:d)},bq.style=function(a,b,c){arguments.length<3&&(c="");return this.styleTween(a,bp(b),c)},bq.styleTween=function(a,b,c){arguments.length<3&&(c="");return this.tween("style."+a,function(d,e){var f=b.call(this,d,e,window.getComputedStyle(this,null).getPropertyValue(a));return f===bn?(this.style.removeProperty(a),null):f&&function(b){this.style.setProperty(a,f(b),c)}})},bq.text=function(a){return this.tween("text",function(b,c){this.textContent=typeof a=="function"?a.call(this,b,c):a})},bq.remove=function(){return this.each("end",function(){var a;!this.__transition__&&(a=this.parentNode)&&a.removeChild(this)})},bq.delay=function(a){var b=this;return b.each(typeof a=="function"?function(c,d,e){b[e][d].delay=+a.apply(this,arguments)}:(a=+a,function(c,d,e){b[e][d].delay=a}))},bq.duration=function(a){var b=this;return b.each(typeof a=="function"?function(c,d,e){b[e][d].duration=+a.apply(this,arguments)}:(a=+a,function(c,d,e){b[e][d].duration=a}))},bq.transition=function(){return this.select(i)};var bv=null,bw,bx;d3.timer=function(a,b,c){var d=!1,e,f=bv;if(arguments.length<3){if(arguments.length<2)b=0;else if(!isFinite(b))return;c=Date.now()}while(f){if(f.callback===a){f.then=c,f.delay=b,d=!0;break}e=f,f=f.next}d||(bv={callback:a,then:c,delay:b,next:bv}),bw||(bx=clearTimeout(bx),bw=1,bA(by))},d3.timer.flush=function(){var a,b=Date.now(),c=bv;while(c)a=b-c.then,c.delay||(c.flush=c.callback(a)),c=c.next;bz()};var bA=window.requestAnimationFrame||window.webkitRequestAnimationFrame||window.mozRequestAnimationFrame||window.oRequestAnimationFrame||window.msRequestAnimationFrame||function(a){setTimeout(a,17)};d3.scale={},d3.scale.linear=function(){return bF([0,1],[0,1],d3.interpolate,!1)},d3.scale.log=function(){return bN(d3.scale.linear(),bP)};var bO=d3.format("e");bP.pow=function(a){return Math.pow(10,a)},bQ.pow=function(a){return-Math.pow(10,-a)},d3.scale.pow=function(){return bR(d3.scale.linear(),1)},d3.scale.sqrt=function(){return d3.scale.pow().exponent(.5)},d3.scale.ordinal=function(){return bT([],{t:"range",x:[]})},d3.scale.category10=function(){return d3.scale.ordinal().range(bU)},d3.scale.category20=function(){return d3.scale.ordinal().range(bV)},d3.scale.category20b=function(){return d3.scale.ordinal().range(bW)},d3.scale.category20c=function(){return d3.scale.ordinal().range(bX)};var 
bU=["#1f77b4","#ff7f0e","#2ca02c","#d62728","#9467bd","#8c564b","#e377c2","#7f7f7f","#bcbd22","#17becf"],bV=["#1f77b4","#aec7e8","#ff7f0e","#ffbb78","#2ca02c","#98df8a","#d62728","#ff9896","#9467bd","#c5b0d5","#8c564b","#c49c94","#e377c2","#f7b6d2","#7f7f7f","#c7c7c7","#bcbd22","#dbdb8d","#17becf","#9edae5"],bW=["#393b79","#5254a3","#6b6ecf","#9c9ede","#637939","#8ca252","#b5cf6b","#cedb9c","#8c6d31","#bd9e39","#e7ba52","#e7cb94","#843c39","#ad494a","#d6616b","#e7969c","#7b4173","#a55194","#ce6dbd","#de9ed6"],bX=["#3182bd","#6baed6","#9ecae1","#c6dbef","#e6550d","#fd8d3c","#fdae6b","#fdd0a2","#31a354","#74c476","#a1d99b","#c7e9c0","#756bb1","#9e9ac8","#bcbddc","#dadaeb","#636363","#969696","#bdbdbd","#d9d9d9"];d3.scale.quantile=function(){return bY([],[])},d3.scale.quantize=function(){return bZ(0,1,[0,1])},d3.svg={},d3.svg.arc=function(){function e(){var e=a.apply(this,arguments),f=b.apply(this,arguments),g=c.apply(this,arguments)+b$,h=d.apply(this,arguments)+b$,i=(h<g&&(i=g,g=h,h=i),h-g),j=i<Math.PI?"0":"1",k=Math.cos(g),l=Math.sin(g),m=Math.cos(h),n=Math.sin(h);return i>=b_?e?"M0,"+f+"A"+f+","+f+" 0 1,1 0,"+ -f+"A"+f+","+f+" 0 1,1 0,"+f+"M0,"+e+"A"+e+","+e+" 0 1,0 0,"+ -e+"A"+e+","+e+" 0 1,0 0,"+e+"Z":"M0,"+f+"A"+f+","+f+" 0 1,1 0,"+ -f+"A"+f+","+f+" 0 1,1 0,"+f+"Z":e?"M"+f*k+","+f*l+"A"+f+","+f+" 0 "+j+",1 "+f*m+","+f*n+"L"+e*m+","+e*n+"A"+e+","+e+" 0 "+j+",0 "+e*k+","+e*l+"Z":"M"+f*k+","+f*l+"A"+f+","+f+" 0 "+j+",1 "+f*m+","+f*n+"L0,0"+"Z"}var a=ca,b=cb,c=cc,d=cd;e.innerRadius=function(b){if(!arguments.length)return a;a=d3.functor(b);return e},e.outerRadius=function(a){if(!arguments.length)return b;b=d3.functor(a);return e},e.startAngle=function(a){if(!arguments.length)return c;c=d3.functor(a);return e},e.endAngle=function(a){if(!arguments.length)return d;d=d3.functor(a);return e},e.centroid=function(){var e=(a.apply(this,arguments)+b.apply(this,arguments))/2,f=(c.apply(this,arguments)+d.apply(this,arguments))/2+b$;return[Math.cos(f)*e,Math.sin(f)*e]};return e};var b$=-Math.PI/2,b_=2*Math.PI-1e-6;d3.svg.line=function(){return ce(Object)};var ci={linear:cj,"step-before":ck,"step-after":cl,basis:cr,"basis-open":cs,"basis-closed":ct,bundle:cu,cardinal:co,"cardinal-open":cm,"cardinal-closed":cn,monotone:cD},cw=[0,2/3,1/3,0],cx=[0,1/3,2/3,0],cy=[0,1/6,2/3,1/6];d3.svg.line.radial=function(){var a=ce(cE);a.radius=a.x,delete a.x,a.angle=a.y,delete a.y;return a},ck.reverse=cl,cl.reverse=ck,d3.svg.area=function(){return cF(Object)},d3.svg.area.radial=function(){var a=cF(cE);a.radius=a.x,delete a.x,a.innerRadius=a.x0,delete a.x0,a.outerRadius=a.x1,delete a.x1,a.angle=a.y,delete a.y,a.startAngle=a.y0,delete a.y0,a.endAngle=a.y1,delete a.y1;return a},d3.svg.chord=function(){function j(a,b,c,d){return"Q 0,0 "+d}function i(a,b){return"A"+a+","+a+" 0 0,1 "+b}function h(a,b){return a.a0==b.a0&&a.a1==b.a1}function g(a,b,f,g){var h=b.call(a,f,g),i=c.call(a,h,g),j=d.call(a,h,g)+b$,k=e.call(a,h,g)+b$;return{r:i,a0:j,a1:k,p0:[i*Math.cos(j),i*Math.sin(j)],p1:[i*Math.cos(k),i*Math.sin(k)]}}function f(c,d){var e=g(this,a,c,d),f=g(this,b,c,d);return"M"+e.p0+i(e.r,e.p1)+(h(e,f)?j(e.r,e.p1,e.r,e.p0):j(e.r,e.p1,f.r,f.p0)+i(f.r,f.p1)+j(f.r,f.p1,e.r,e.p0))+"Z"}var a=cI,b=cJ,c=cK,d=cc,e=cd;f.radius=function(a){if(!arguments.length)return c;c=d3.functor(a);return f},f.source=function(b){if(!arguments.length)return a;a=d3.functor(b);return f},f.target=function(a){if(!arguments.length)return b;b=d3.functor(a);return f},f.startAngle=function(a){if(!arguments.length)return d;d=d3.functor(a);return 
f},f.endAngle=function(a){if(!arguments.length)return e;e=d3.functor(a);return f};return f},d3.svg.diagonal=function(){function d(d,e){var f=a.call(this,d,e),g=b.call(this,d,e),h=(f.y+g.y)/2,i=[f,{x:f.x,y:h},{x:g.x,y:h},g];i=i.map(c);return"M"+i[0]+"C"+i[1]+" "+i[2]+" "+i[3]}var a=cI,b=cJ,c=cN;d.source=function(b){if(!arguments.length)return a;a=d3.functor(b);return d},d.target=function(a){if(!arguments.length)return b;b=d3.functor(a);return d},d.projection=function(a){if(!arguments.length)return c;c=a;return d};return d},d3.svg.diagonal.radial=function(){var a=d3.svg.diagonal(),b=cN,c=a.projection;a.projection=function(a){return arguments.length?c(cO(b=a)):b};return a},d3.svg.mouse=function(a){return cQ(a,d3.event)};var cP=/WebKit/.test(navigator.userAgent)?-1:0;d3.svg.touches=function(a){var b=d3.event.touches;return b?d(b).map(function(b){var c=cQ(a,b);c.identifier=b.identifier;return c}):[]},d3.svg.symbol=function(){function c(c,d){return(cT[a.call(this,c,d)]||cT.circle)(b.call(this,c,d))}var a=cS,b=cR;c.type=function(b){if(!arguments.length)return a;a=d3.functor(b);return c},c.size=function(a){if(!arguments.length)return b;b=d3.functor(a);return c};return c};var cT={circle:function(a){var b=Math.sqrt(a/Math.PI);return"M0,"+b+"A"+b+","+b+" 0 1,1 0,"+ -b+"A"+b+","+b+" 0 1,1 0,"+b+"Z"},cross:function(a){var b=Math.sqrt(a/5)/2;return"M"+ -3*b+","+ -b+"H"+ -b+"V"+ -3*b+"H"+b+"V"+ -b+"H"+3*b+"V"+b+"H"+b+"V"+3*b+"H"+ -b+"V"+b+"H"+ -3*b+"Z"},diamond:function(a){var b=Math.sqrt(a/(2*cV)),c=b*cV;return"M0,"+ -b+"L"+c+",0"+" 0,"+b+" "+ -c+",0"+"Z"},square:function(a){var b=Math.sqrt(a)/2;return"M"+ -b+","+ -b+"L"+b+","+ -b+" "+b+","+b+" "+ -b+","+b+"Z"},"triangle-down":function(a){var b=Math.sqrt(a/cU),c=b*cU/2;return"M0,"+c+"L"+b+","+ -c+" "+ -b+","+ -c+"Z"},"triangle-up":function(a){var b=Math.sqrt(a/cU),c=b*cU/2;return"M0,"+ -c+"L"+b+","+c+" "+ -b+","+c+"Z"}};d3.svg.symbolTypes=d3.keys(cT);var cU=Math.sqrt(3),cV=Math.tan(30*Math.PI/180);d3.svg.axis=function(){function j(j){j.each(function(k,l,m){var n=d3.select(this),o=j.delay?function(a){var b=bs;try{bs=j.id;return a.transition().delay(j[m][l].delay).duration(j[m][l].duration).ease(j.ease())}finally{bs=b}}:Object,p=a.ticks.apply(a,g),q=h==null?a.tickFormat.apply(a,g):h,r=cY(a,p,i),s=n.selectAll(".minor").data(r,String),t=s.enter().insert("svg:line","g").attr("class","tick minor").style("opacity",1e-6),u=o(s.exit()).style("opacity",1e-6).remove(),v=o(s).style("opacity",1),w=n.selectAll("g").data(p,String),x=w.enter().insert("svg:g","path").style("opacity",1e-6),y=o(w.exit()).style("opacity",1e-6).remove(),z=o(w).style("opacity",1),A,B=bC(a.range()),C=n.selectAll(".domain").data([0]),D=C.enter().append("svg:path").attr("class","domain"),E=o(C),F=this.__chart__||a;this.__chart__=a.copy(),x.append("svg:line").attr("class","tick"),x.append("svg:text"),z.select("text").text(q);switch(b){case"bottom":A=cW,v.attr("x2",0).attr("y2",d),z.select("line").attr("x2",0).attr("y2",c),z.select("text").attr("x",0).attr("y",Math.max(c,0)+f).attr("dy",".71em").attr("text-anchor","middle"),E.attr("d","M"+B[0]+","+e+"V0H"+B[1]+"V"+e);break;case"top":A=cW,v.attr("x2",0).attr("y2",-d),z.select("line").attr("x2",0).attr("y2",-c),z.select("text").attr("x",0).attr("y",-(Math.max(c,0)+f)).attr("dy","0em").attr("text-anchor","middle"),E.attr("d","M"+B[0]+","+ -e+"V0H"+B[1]+"V"+ 
-e);break;case"left":A=cX,v.attr("x2",-d).attr("y2",0),z.select("line").attr("x2",-c).attr("y2",0),z.select("text").attr("x",-(Math.max(c,0)+f)).attr("y",0).attr("dy",".32em").attr("text-anchor","end"),E.attr("d","M"+ -e+","+B[0]+"H0V"+B[1]+"H"+ -e);break;case"right":A=cX,v.attr("x2",d).attr("y2",0),z.select("line").attr("x2",c).attr("y2",0),z.select("text").attr("x",Math.max(c,0)+f).attr("y",0).attr("dy",".32em").attr("text-anchor","start"),E.attr("d","M"+e+","+B[0]+"H0V"+B[1]+"H"+e)}x.call(A,F),z.call(A,a),y.call(A,a),t.call(A,F),v.call(A,a),u.call(A,a)})}var a=d3.scale.linear(),b="bottom",c=6,d=6,e=6,f=3,g=[10],h,i=0;j.scale=function(b){if(!arguments.length)return a;a=b;return j},j.orient=function(a){if(!arguments.length)return b;b=a;return j},j.ticks=function(){if(!arguments.length)return g;g=arguments;return j},j.tickFormat=function(a){if(!arguments.length)return h;h=a;return j},j.tickSize=function(a,b,f){if(!arguments.length)return c;var g=arguments.length-1;c=+a,d=g>1?+b:c,e=g>0?+arguments[g]:c;return j},j.tickPadding=function(a){if(!arguments.length)return f;f=+a;return j},j.tickSubdivide=function(a){if(!arguments.length)return i;i=+a;return j};return j},d3.behavior={},d3.behavior.drag=function(){function d(){c.apply(this,arguments),de("dragstart")}function c(){cZ=a,c$=d3.event.target,db=df((c_=this).parentNode),dc=0,da=arguments}function b(){this.on("mousedown.drag",d).on("touchstart.drag",d),d3.select(window).on("mousemove.drag",dg).on("touchmove.drag",dg).on("mouseup.drag",dh,!0).on("touchend.drag",dh,!0).on("click.drag",di,!0)}var a=d3.dispatch("drag","dragstart","dragend");b.on=function(c,d){a[c].add(d);return b};return b};var cZ,c$,c_,da,db,dc,dd;d3.behavior.zoom=function(){function h(){d.apply(this,arguments);var b=dz(),c,e=Date.now();b.length===1&&e-dp<300&&dE(1+Math.floor(a[2]),c=b[0],dn[c.identifier]),dp=e}function g(){d.apply(this,arguments);var b=d3.svg.mouse(dt);dE(d3.event.shiftKey?Math.ceil(a[2]-1):Math.floor(a[2]+1),b,dx(b))}function f(){d.apply(this,arguments),dm||(dm=dx(d3.svg.mouse(dt))),dE(dy()+a[2],d3.svg.mouse(dt),dm)}function e(){d.apply(this,arguments),dl=dx(d3.svg.mouse(dt)),dv=!1,d3.event.preventDefault(),window.focus()}function d(){dq=a,dr=b.zoom.dispatch,ds=d3.event.target,dt=this,du=arguments}function c(){this.on("mousedown.zoom",e).on("mousewheel.zoom",f).on("DOMMouseScroll.zoom",f).on("dblclick.zoom",g).on("touchstart.zoom",h),d3.select(window).on("mousemove.zoom",dB).on("mouseup.zoom",dC).on("touchmove.zoom",dA).on("touchend.zoom",dz).on("click.zoom",dD,!0)}var a=[0,0,0],b=d3.dispatch("zoom");c.on=function(a,d){b[a].add(d);return c};return c};var dk,dl,dm,dn={},dp=0,dq,dr,ds,dt,du,dv,dw})() \ No newline at end of file
diff --git a/media/silhouette.png b/media/devs/silhouette.png
index afa87cd1..afa87cd1 100644
--- a/media/silhouette.png
+++ b/media/devs/silhouette.png
Binary files differ
diff --git a/media/donate.gif b/media/donate.gif
deleted file mode 100644
index d637428b..00000000
--- a/media/donate.gif
+++ /dev/null
Binary files differ
diff --git a/media/logos/apple-touch-icon-114x114.png b/media/logos/apple-touch-icon-114x114.png
new file mode 100644
index 00000000..e6365ee2
--- /dev/null
+++ b/media/logos/apple-touch-icon-114x114.png
Binary files differ
diff --git a/media/logos/apple-touch-icon-57x57.png b/media/logos/apple-touch-icon-57x57.png
new file mode 100644
index 00000000..d2d78262
--- /dev/null
+++ b/media/logos/apple-touch-icon-57x57.png
Binary files differ
diff --git a/media/logos/apple-touch-icon-72x72.png b/media/logos/apple-touch-icon-72x72.png
new file mode 100644
index 00000000..170656e0
--- /dev/null
+++ b/media/logos/apple-touch-icon-72x72.png
Binary files differ
diff --git a/media/logos/archlinux-logo-only.svg b/media/logos/archlinux-logo-only.svg
new file mode 100644
index 00000000..09be94a7
--- /dev/null
+++ b/media/logos/archlinux-logo-only.svg
@@ -0,0 +1,45 @@
+<?xml version="1.0" encoding="UTF-8" standalone="no"?>
+<!-- Created with Inkscape (http://www.inkscape.org/) -->
+
+<svg
+ xmlns:dc="http://purl.org/dc/elements/1.1/"
+ xmlns:cc="http://creativecommons.org/ns#"
+ xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
+ xmlns:svg="http://www.w3.org/2000/svg"
+ xmlns="http://www.w3.org/2000/svg"
+ version="1.0"
+ width="200"
+ height="200"
+ id="svg2424">
+ <metadata
+ id="metadata3206">
+ <rdf:RDF>
+ <cc:Work
+ rdf:about="">
+ <dc:format>image/svg+xml</dc:format>
+ <dc:type
+ rdf:resource="http://purl.org/dc/dcmitype/StillImage" />
+ <dc:title></dc:title>
+ </cc:Work>
+ </rdf:RDF>
+ </metadata>
+ <defs
+ id="defs2426" />
+ <path
+ d="M 99.982978,9.4299079 C 91.920693,29.201751 87.057917,42.128656 78.081625,61.311918 83.585216,67.143195 90.340614,73.933635 101.31135,81.606952 89.516733,76.756627 81.471355,71.884513 75.458912,66.827107 63.970969,90.7953 45.972667,124.94368 9.4483145,190.57009 38.155229,173.99182 60.408321,163.77892 81.147131,159.87687 c -0.890533,-3.82576 -1.396835,-7.9676 -1.362448,-12.29473 l 0.03406,-0.91557 c 0.455512,-18.39852 10.022891,-32.53528 21.356367,-31.57611 11.33348,0.95916 20.14288,16.65456 19.68737,35.05308 -0.0857,3.45517 -0.47603,6.79044 -1.15808,9.87502 20.51365,4.01105 42.52879,14.20216 70.84729,30.55153 -5.58386,-10.27831 -10.56793,-19.54295 -15.32754,-28.37161 -7.49716,-5.80948 -15.31706,-13.37379 -31.26818,-21.5594 10.96388,2.84479 18.81388,6.13649 24.9328,9.80965 C 120.49649,60.352755 116.57776,48.374114 99.982978,9.4299079 z"
+ id="path2518"
+ style="fill:#1793d1;fill-opacity:1;fill-rule:evenodd;stroke:none" />
+ <g
+ transform="matrix(0.8746356,0,0,0.8746356,-26.046795,-109.83508)"
+ id="text2638"
+ style="font-size:8.25130367px;font-style:normal;font-weight:normal;fill:#1793d1;fill-opacity:1;stroke:none;font-family:DejaVu Sans Mono">
+ <path
+ d="m 239.84053,313.69965 0,-5.20945 -1.94598,0 0,-0.697 4.68164,0 0,0.697 -1.95404,0 0,5.20945 -0.78162,0"
+ id="path3940"
+ style="fill:#1793d1;fill-opacity:1" />
+ <path
+ d="m 243.39004,313.69965 0,-5.90645 1.17646,0 1.39805,4.18205 c 0.12892,0.38947 0.22293,0.6809 0.28202,0.87429 0.0671,-0.21488 0.1719,-0.53048 0.31426,-0.94681 l 1.41417,-4.10953 1.05155,0 0,5.90645 -0.75341,0 0,-4.94353 -1.71634,4.94353 -0.70506,0 -1.70828,-5.02814 0,5.02814 -0.75342,0"
+ id="path3942"
+ style="fill:#1793d1;fill-opacity:1" />
+ </g>
+</svg>
diff --git a/media/visualize.js b/media/visualize.js
new file mode 100644
index 00000000..d9196d4d
--- /dev/null
+++ b/media/visualize.js
@@ -0,0 +1,129 @@
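+/* packages_treemap: render an interactive treemap of package statistics into
+ * chart_id, with buttons to switch the scaling value and the grouping/ordering */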
+function packages_treemap(chart_id, orderings, default_order) {
+ var jq_div = $(chart_id),
+ color = d3.scale.category20();
+ var key_func = function(d) { return d.key; };
+ var value_package_count = function(d) { return d.count; },
+ value_flagged_count = function(d) { return d.flagged; },
+ value_compressed_size = function(d) { return d.csize; },
+ value_installed_size = function(d) { return d.isize; };
+
+ /* tag the function so when we display, we can format filesizes */
+ value_package_count.is_size = value_flagged_count.is_size = false;
+ value_compressed_size.is_size = value_installed_size.is_size = true;
+
+ var treemap = d3.layout.treemap()
+ .size([jq_div.width(), jq_div.height()])
+ /*.sticky(true)*/
+ .value(value_package_count)
+ .sort(function(a, b) { return a.key < b.key ? -1 : a.key > b.key ? 1 : 0; })
+ .children(function(d) { return d.data; });
+
+ var cell_html = function(d) {
+ if (d.children) {
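+ /* grouping (non-leaf) nodes draw no label of their own */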
+ return "";
+ }
+ var valuefunc = treemap.value();
+ var value = valuefunc(d);
+ if (valuefunc.is_size && value !== undefined) {
+ value = format_filesize(value);
+ }
+ return "<span>" + d.name + ": " + value + "</span>";
+ };
+
+ var d3_div = d3.select(jq_div.get(0));
+
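+ /* prop_px builds a positioning callback: d[prop] + offset as a "px" string, clamped at zero */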
+ var prop_px = function(prop, offset) {
+ return function(d) {
+ var dist = d[prop] + offset;
+ if (dist > 0) {
+ return dist + "px";
+ }
+ else {
+ return "0px";
+ }
+ };
+ };
+
+ var cell = function() {
+ /* the -1 offset comes from the border width we use in the CSS */
+ this.style("left", prop_px("x", 0)).style("top", prop_px("y", 0))
+ .style("width", prop_px("dx", -1)).style("height", prop_px("dy", -1));
+ };
+
+ var fetch_for_ordering = function(order) {
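+ /* load the JSON for this ordering and rebind it to the treemap cells */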
+ d3.json(order.url, function(json) {
+ var nodes = d3_div.data([json]).selectAll("div")
+ .data(treemap.nodes, key_func);
+ /* start out new nodes in the center of the picture area */
+ var w_center = jq_div.width() / 2,
+ h_center = jq_div.height() / 2;
+ nodes.enter().append("div")
+ .attr("class", "treemap-cell")
+ .attr("title", function(d) { return d.name; })
+ .style("left", w_center + "px").style("top", h_center + "px")
+ .style("width", "0px").style("height", "0px")
+ .style("display", function(d) { return d.children ? "none" : null; })
+ .html(cell_html);
+ nodes.transition().duration(1500)
+ .style("background-color", function(d) { return d.children ? null : color(d[order.color_attr]); })
+ .call(cell);
+ nodes.exit().transition().duration(1500).remove();
+ });
+ };
+
+ /* start the callback for the default order */
+ fetch_for_ordering(orderings[default_order]);
+
+ var make_scale_button = function(name, valuefunc) {
+ var button_id = chart_id + "-" + name;
+ /* upon button click, attach new value function and redraw all boxes
+ * accordingly */
+ d3.select(button_id).on("click", function() {
+ d3_div.selectAll("div")
+ .data(treemap.value(valuefunc), key_func)
+ .html(cell_html)
+ .transition().duration(1500).call(cell);
+
+ /* drop off the '#' sign to convert id to a class prefix */
+ d3.selectAll("." + chart_id.substring(1) + "-scaleby")
+ .classed("active", false);
+ d3.select(button_id).classed("active", true);
+ });
+ };
+
+ /* each scale button tweaks our value, e.g. net size function */
+ make_scale_button("count", value_package_count);
+ make_scale_button("flagged", value_flagged_count);
+ make_scale_button("csize", value_compressed_size);
+ make_scale_button("isize", value_installed_size);
+
+ var make_group_button = function(name, order) {
+ var button_id = chart_id + "-" + name;
+ d3.select(button_id).on("click", function() {
+ fetch_for_ordering(order);
+
+ /* drop off the '#' sign to convert id to a class prefix */
+ d3.selectAll("." + chart_id.substring(1) + "-groupby")
+ .classed("active", false);
+ d3.select(button_id).classed("active", true);
+ });
+ };
+
+ $.each(orderings, function(k, v) {
+ make_group_button(k, v);
+ });
+
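+ /* debounce window resizes: re-layout only after 200ms without a resize event */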
+ var resize_timeout = null;
+ var real_resize = function() {
+ resize_timeout = null;
+ d3_div.selectAll("div")
+ .data(treemap.size([jq_div.width(), jq_div.height()]), key_func)
+ .call(cell);
+ };
+ $(window).resize(function() {
+ if (resize_timeout) {
+ clearTimeout(resize_timeout);
+ }
+ resize_timeout = setTimeout(real_resize, 200);
+ });
+}
diff --git a/mirrors/admin.py b/mirrors/admin.py
index 0632872d..3786d8d2 100644
--- a/mirrors/admin.py
+++ b/mirrors/admin.py
@@ -33,14 +33,15 @@ class MirrorUrlInlineAdmin(admin.TabularInline):
extra = 3
# ripped off from django.forms.fields, adding netmask ability
-ipv4nm_re = re.compile(r'^(25[0-5]|2[0-4]\d|[0-1]?\d?\d)(\.(25[0-5]|2[0-4]\d|[0-1]?\d?\d)){3}(/(\d|[1-2]\d|3[0-2])){0,1}$')
+IPV4NM_RE = re.compile(r'^(25[0-5]|2[0-4]\d|[0-1]?\d?\d)(\.(25[0-5]|2[0-4]\d|[0-1]?\d?\d)){3}(/(\d|[1-2]\d|3[0-2])){0,1}$')
+
class IPAddressNetmaskField(forms.fields.RegexField):
default_error_messages = {
'invalid': u'Enter a valid IPv4 address, possibly including netmask.',
}
def __init__(self, *args, **kwargs):
- super(IPAddressNetmaskField, self).__init__(ipv4nm_re, *args, **kwargs)
+ super(IPAddressNetmaskField, self).__init__(IPV4NM_RE, *args, **kwargs)
class MirrorRsyncForm(forms.ModelForm):
class Meta:
diff --git a/mirrors/utils.py b/mirrors/utils.py
index 686ec581..8518b3ba 100644
--- a/mirrors/utils.py
+++ b/mirrors/utils.py
@@ -40,7 +40,8 @@ def get_mirror_statuses(cutoff=default_cutoff):
last_sync=Max('logs__last_sync'),
last_check=Max('logs__check_time'),
duration_avg=Avg('logs__duration'),
- duration_stddev=StdDev('logs__duration')
+ #duration_stddev=StdDev('logs__duration')
+ duration_stddev=Max('logs__duration')
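+ # StdDev is swapped for Max above, presumably because the StdDev
+ # aggregate is unavailable on some database backends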
).order_by('-last_sync', '-duration_avg')
# The Django ORM makes it really hard to get actual average delay in the
diff --git a/mirrors/views.py b/mirrors/views.py
index 6135cee3..417e26ee 100644
--- a/mirrors/views.py
+++ b/mirrors/views.py
@@ -57,7 +57,7 @@ def find_mirrors(request, countries=None, protocols=None, use_status=False,
is_download=True).values_list('protocol', flat=True)
qset = MirrorUrl.objects.select_related().filter(
protocol__protocol__in=protocols,
- mirror__public=True, mirror__active=True, mirror__isos=True
+ mirror__public=True, mirror__active=True,
)
if countries and 'all' not in countries:
qset = qset.filter(Q(country__in=countries) |
diff --git a/news/views.py b/news/views.py
index 990ee154..7ac009ba 100644
--- a/news/views.py
+++ b/news/views.py
@@ -32,7 +32,7 @@ def news_list(request):
class NewsForm(forms.ModelForm):
class Meta:
model = News
- exclude=('id', 'slug', 'author', 'postdate')
+ exclude = ('id', 'slug', 'author', 'postdate')
def find_unique_slug(newsitem):
'''Attempt to find a unique slug for this news item.'''
diff --git a/packages/management/__init__.py b/packages/management/__init__.py
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/packages/management/__init__.py
diff --git a/packages/management/commands/__init__.py b/packages/management/commands/__init__.py
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/packages/management/commands/__init__.py
diff --git a/packages/management/commands/populate_signoffs.py b/packages/management/commands/populate_signoffs.py
new file mode 100644
index 00000000..ce5ec734
--- /dev/null
+++ b/packages/management/commands/populate_signoffs.py
@@ -0,0 +1,88 @@
+# -*- coding: utf-8 -*-
+"""
+populate_signoffs command
+
+Pull the latest commit message from SVN for a given package that is
+signoff-eligible and does not have an existing comment attached.
+
+Usage: ./manage.py populate_signoffs
+"""
+
+from datetime import datetime
+import logging
+import subprocess
+import sys
+from xml.etree.ElementTree import XML
+
+from django.conf import settings
+from django.contrib.auth.models import User
+from django.core.management.base import NoArgsCommand
+
+from ...models import SignoffSpecification
+from ...utils import get_signoff_groups
+from devel.utils import UserFinder
+
+logging.basicConfig(
+ level=logging.INFO,
+ format='%(asctime)s -> %(levelname)s: %(message)s',
+ datefmt='%Y-%m-%d %H:%M:%S',
+ stream=sys.stderr)
+logger = logging.getLogger()
+
+class Command(NoArgsCommand):
+ help = "Pull the latest commit message from SVN for a given package that is signoff-eligible and does not have an existing comment attached"
+
+ def handle_noargs(self, **options):
+ v = int(options.get('verbosity', 1))
+ if v == 0:
+ logger.level = logging.ERROR
+ elif v == 1:
+ logger.level = logging.INFO
+ elif v == 2:
+ logger.level = logging.DEBUG
+
+ return add_signoff_comments()
+
+def svn_log(pkgbase, repo):
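+ '''Fetch the latest SVN log entry for a package and return its revision, date, author, and message.'''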
+ path = '%s%s/%s/trunk/' % (settings.SVN_BASE_URL, repo.svn_root, pkgbase)
+ cmd = ['svn', 'log', '--limit=1', '--xml', path]
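+ # check_output raises CalledProcessError if svn exits non-zero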
+ log_data = subprocess.check_output(cmd)
+ # the XML format is very very simple, especially with only one revision
+ xml = XML(log_data)
+ revision = int(xml.find('logentry').get('revision'))
+ date = datetime.strptime(xml.findtext('logentry/date'),
+ '%Y-%m-%dT%H:%M:%S.%fZ')
+ return {
+ 'revision': revision,
+ 'date': date,
+ 'author': xml.findtext('logentry/author'),
+ 'message': xml.findtext('logentry/msg'),
+ }
+
+def create_specification(package, log, finder):
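+ '''Build a SignoffSpecification for a package, using the trimmed SVN commit message as its comment.'''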
+ trimmed_message = log['message'].strip()
+ spec = SignoffSpecification(pkgbase=package.pkgbase,
+ pkgver=package.pkgver, pkgrel=package.pkgrel,
+ epoch=package.epoch, arch=package.arch, repo=package.repo,
+ comments=trimmed_message)
+ spec.user = finder.find_by_username(log['author'])
+ return spec
+
+def add_signoff_comments():
+ logger.info("getting all signoff groups")
+ groups = get_signoff_groups()
+ logger.info("%d signoff groups found", len(groups))
+
+ finder = UserFinder()
+
+ for group in groups:
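+ # only groups still using the default spec need a generated comment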
+ if not group.default_spec:
+ continue
+
+ logger.debug("getting SVN log for %s (%s)", group.pkgbase, group.repo)
+ log = svn_log(group.pkgbase, group.repo)
+ logger.info("creating spec with SVN message for %s", group.pkgbase)
+ spec = create_specification(group.packages[0], log, finder)
+ spec.save()
+
+# vim: set ts=4 sw=4 et:
diff --git a/packages/management/commands/signoff_report.py b/packages/management/commands/signoff_report.py
new file mode 100644
index 00000000..f822c8ad
--- /dev/null
+++ b/packages/management/commands/signoff_report.py
@@ -0,0 +1,125 @@
+# -*- coding: utf-8 -*-
+"""
+signoff_report command
+
+Send an email summarizing the state of outstanding signoffs for the given
+repository.
+
+Usage: ./manage.py signoff_report <email> <repository>
+"""
+
+from django.core.mail import send_mail
+from django.core.urlresolvers import reverse
+from django.core.management.base import BaseCommand, CommandError
+from django.contrib.auth.models import User
+from django.contrib.sites.models import Site
+from django.db.models import Count
+from django.template import loader, Context
+
+from collections import namedtuple
+from datetime import datetime, timedelta
+import logging
+from operator import attrgetter
+import sys
+
+from main.models import Repo
+from packages.models import Signoff
+from packages.utils import get_signoff_groups
+
+logging.basicConfig(
+ level=logging.INFO,
+ format='%(asctime)s -> %(levelname)s: %(message)s',
+ datefmt='%Y-%m-%d %H:%M:%S',
+ stream=sys.stderr)
+logger = logging.getLogger()
+
+class Command(BaseCommand):
+ args = "<email> <repository>"
+ help = "Send a signoff report for the given repository."
+
+ def handle(self, *args, **options):
+ v = int(options.get('verbosity', 1))
+ if v == 0:
+ logger.level = logging.ERROR
+ elif v == 1:
+ logger.level = logging.INFO
+ elif v == 2:
+ logger.level = logging.DEBUG
+
+ if len(args) != 2:
+ raise CommandError("email and repository must be provided")
+
+ return generate_report(args[0], args[1])
+
+def generate_report(email, repo_name):
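+ '''Assemble signoff statistics for a single repository and mail the rendered report.'''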
+ repo = Repo.objects.get(name__iexact=repo_name)
+ # Collect all existing signoffs for these packages
+ signoff_groups = sorted(get_signoff_groups([repo]),
+ key=attrgetter('target_repo', 'arch', 'pkgbase'))
+ disabled = []
+ bad = []
+ complete = []
+ incomplete = []
+ new = []
+ old = []
+
+ new_hours = 24
+ old_days = 14
+ now = datetime.utcnow()
+ new_cutoff = now - timedelta(hours=new_hours)
+ old_cutoff = now - timedelta(days=old_days)
+
+ if len(signoff_groups) == 0:
+ # no need to send an email at all
+ return
+
+ for group in signoff_groups:
+ spec = group.specification
+ if spec.known_bad:
+ bad.append(group)
+ elif not spec.enabled:
+ disabled.append(group)
+ elif group.approved():
+ complete.append(group)
+ else:
+ incomplete.append(group)
+
+ if group.package.last_update > new_cutoff:
+ new.append(group)
+ if group.package.last_update < old_cutoff:
+ old.append(group)
+
+ old.sort(key=attrgetter('last_update'))
+
+ proto = 'https'
+ domain = Site.objects.get_current().domain
+ signoffs_url = '%s://%s%s' % (proto, domain, reverse('package-signoffs'))
+
+ # and the fun bit
+ Leader = namedtuple('Leader', ['user', 'count'])
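+ # top five signers by count of non-revoked signoffs within the new_hours window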
+ leaders = Signoff.objects.filter(created__gt=new_cutoff,
+ revoked__isnull=True).values_list('user').annotate(
+ signoff_count=Count('pk')).order_by('-signoff_count')[:5]
+ users = User.objects.in_bulk([l[0] for l in leaders])
+ leaders = (Leader(users[l[0]], l[1]) for l in leaders)
+
+ subject = 'Signoff report for [%s]' % repo.name.lower()
+ t = loader.get_template('packages/signoff_report.txt')
+ c = Context({
+ 'repo': repo,
+ 'signoffs_url': signoffs_url,
+ 'disabled': disabled,
+ 'bad': bad,
+ 'all': signoff_groups,
+ 'incomplete': incomplete,
+ 'complete': complete,
+ 'new': new,
+ 'new_hours': new_hours,
+ 'old': old,
+ 'old_days': old_days,
+ 'leaders': leaders,
+ })
+ from_addr = 'Parabola Website Notification <nobody@parabolagnulinux.org>'
+ send_mail(subject, t.render(c), from_addr, [email])
+
+# vim: set ts=4 sw=4 et:
diff --git a/packages/migrations/0010_auto__add_signoffspecification.py b/packages/migrations/0010_auto__add_signoffspecification.py
new file mode 100644
index 00000000..da24824e
--- /dev/null
+++ b/packages/migrations/0010_auto__add_signoffspecification.py
@@ -0,0 +1,183 @@
+# encoding: utf-8
+import datetime
+from south.db import db
+from south.v2 import SchemaMigration
+from django.db import models
+
+class Migration(SchemaMigration):
+
+ def forwards(self, orm):
+ db.create_table('packages_signoffspecification', (
+ ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
+ ('pkgbase', self.gf('django.db.models.fields.CharField')(max_length=255, db_index=True)),
+ ('pkgver', self.gf('django.db.models.fields.CharField')(max_length=255)),
+ ('pkgrel', self.gf('django.db.models.fields.CharField')(max_length=255)),
+ ('epoch', self.gf('django.db.models.fields.PositiveIntegerField')(default=0)),
+ ('arch', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['main.Arch'])),
+ ('repo', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['main.Repo'])),
+ ('user', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['auth.User'])),
+ ('created', self.gf('django.db.models.fields.DateTimeField')()),
+ ('required', self.gf('django.db.models.fields.PositiveIntegerField')(default=2)),
+ ('enabled', self.gf('django.db.models.fields.BooleanField')(default=True)),
+ ('known_bad', self.gf('django.db.models.fields.BooleanField')(default=False)),
+ ('comments', self.gf('django.db.models.fields.TextField')(null=True, blank=True)),
+ ))
+ db.send_create_signal('packages', ['SignoffSpecification'])
+
+
+ def backwards(self, orm):
+ db.delete_table('packages_signoffspecification')
+
+
+ models = {
+ 'auth.group': {
+ 'Meta': {'object_name': 'Group'},
+ 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
+ 'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
+ },
+ 'auth.permission': {
+ 'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
+ 'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
+ 'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
+ 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
+ },
+ 'auth.user': {
+ 'Meta': {'object_name': 'User'},
+ 'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
+ 'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
+ 'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
+ 'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
+ 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
+ 'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
+ 'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
+ 'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
+ 'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
+ 'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
+ 'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
+ 'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
+ },
+ 'contenttypes.contenttype': {
+ 'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
+ 'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
+ 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
+ 'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
+ },
+ 'main.arch': {
+ 'Meta': {'ordering': "['name']", 'object_name': 'Arch', 'db_table': "'arches'"},
+ 'agnostic': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
+ 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'})
+ },
+ 'main.package': {
+ 'Meta': {'ordering': "('pkgname',)", 'object_name': 'Package', 'db_table': "'packages'"},
+ 'arch': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'packages'", 'to': "orm['main.Arch']"}),
+ 'build_date': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
+ 'compressed_size': ('main.models.PositiveBigIntegerField', [], {}),
+ 'epoch': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
+ 'filename': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
+ 'files_last_update': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
+ 'flag_date': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
+ 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'installed_size': ('main.models.PositiveBigIntegerField', [], {}),
+ 'last_update': ('django.db.models.fields.DateTimeField', [], {}),
+ 'packager': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True'}),
+ 'packager_str': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
+ 'pgp_signature': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
+ 'pkgbase': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
+ 'pkgdesc': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True'}),
+ 'pkgname': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
+ 'pkgrel': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
+ 'pkgver': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
+ 'repo': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'packages'", 'to': "orm['main.Repo']"}),
+ 'url': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True'})
+ },
+ 'main.repo': {
+ 'Meta': {'ordering': "['name']", 'object_name': 'Repo', 'db_table': "'repos'"},
+ 'bugs_category': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
+ 'bugs_project': ('django.db.models.fields.SmallIntegerField', [], {'default': '1'}),
+ 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
+ 'staging': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
+ 'svn_root': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
+ 'testing': ('django.db.models.fields.BooleanField', [], {'default': 'False'})
+ },
+ 'packages.conflict': {
+ 'Meta': {'ordering': "['name']", 'object_name': 'Conflict'},
+ 'comparison': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '255'}),
+ 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
+ 'pkg': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'conflicts'", 'to': "orm['main.Package']"}),
+ 'version': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '255'})
+ },
+ 'packages.license': {
+ 'Meta': {'ordering': "['name']", 'object_name': 'License'},
+ 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
+ 'pkg': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'licenses'", 'to': "orm['main.Package']"})
+ },
+ 'packages.packagegroup': {
+ 'Meta': {'object_name': 'PackageGroup'},
+ 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
+ 'pkg': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'groups'", 'to': "orm['main.Package']"})
+ },
+ 'packages.packagerelation': {
+ 'Meta': {'unique_together': "(('pkgbase', 'user', 'type'),)", 'object_name': 'PackageRelation'},
+ 'created': ('django.db.models.fields.DateTimeField', [], {}),
+ 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'pkgbase': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
+ 'type': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1'}),
+ 'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'package_relations'", 'to': "orm['auth.User']"})
+ },
+ 'packages.provision': {
+ 'Meta': {'ordering': "['name']", 'object_name': 'Provision'},
+ 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
+ 'pkg': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'provides'", 'to': "orm['main.Package']"}),
+ 'version': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '255'})
+ },
+ 'packages.replacement': {
+ 'Meta': {'ordering': "['name']", 'object_name': 'Replacement'},
+ 'comparison': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '255'}),
+ 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
+ 'pkg': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'replaces'", 'to': "orm['main.Package']"}),
+ 'version': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '255'})
+ },
+ 'packages.signoff': {
+ 'Meta': {'object_name': 'Signoff'},
+ 'arch': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['main.Arch']"}),
+ 'comments': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
+ 'created': ('django.db.models.fields.DateTimeField', [], {}),
+ 'epoch': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
+ 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'pkgbase': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
+ 'pkgrel': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
+ 'pkgver': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
+ 'repo': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['main.Repo']"}),
+ 'revoked': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
+ 'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'package_signoffs'", 'to': "orm['auth.User']"})
+ },
+ 'packages.signoffspecification': {
+ 'Meta': {'object_name': 'SignoffSpecification'},
+ 'arch': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['main.Arch']"}),
+ 'comments': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
+ 'created': ('django.db.models.fields.DateTimeField', [], {}),
+ 'enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
+ 'epoch': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
+ 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'known_bad': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
+ 'pkgbase': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
+ 'pkgrel': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
+ 'pkgver': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
+ 'repo': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['main.Repo']"}),
+ 'required': ('django.db.models.fields.PositiveIntegerField', [], {'default': '2'}),
+ 'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
+ }
+ }
+
+ complete_apps = ['packages']
diff --git a/packages/migrations/0011_auto__chg_field_signoffspecification_user.py b/packages/migrations/0011_auto__chg_field_signoffspecification_user.py
new file mode 100644
index 00000000..f6e3cdd9
--- /dev/null
+++ b/packages/migrations/0011_auto__chg_field_signoffspecification_user.py
@@ -0,0 +1,165 @@
+# encoding: utf-8
+from south.db import db
+from south.v2 import SchemaMigration
+from django.db import models
+
+class Migration(SchemaMigration):
+
+ def forwards(self, orm):
+ db.alter_column('packages_signoffspecification', 'user_id', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['auth.User'], null=True))
+
+ def backwards(self, orm):
+ db.alter_column('packages_signoffspecification', 'user_id', self.gf('django.db.models.fields.related.ForeignKey')(default=1, to=orm['auth.User']))
+
+ models = {
+ 'auth.group': {
+ 'Meta': {'object_name': 'Group'},
+ 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
+ 'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
+ },
+ 'auth.permission': {
+ 'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
+ 'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
+ 'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
+ 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
+ },
+ 'auth.user': {
+ 'Meta': {'object_name': 'User'},
+ 'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
+ 'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
+ 'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
+ 'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
+ 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
+ 'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
+ 'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
+ 'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
+ 'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
+ 'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
+ 'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
+ 'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
+ },
+ 'contenttypes.contenttype': {
+ 'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
+ 'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
+ 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
+ 'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
+ },
+ 'main.arch': {
+ 'Meta': {'ordering': "['name']", 'object_name': 'Arch', 'db_table': "'arches'"},
+ 'agnostic': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
+ 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'})
+ },
+ 'main.package': {
+ 'Meta': {'ordering': "('pkgname',)", 'object_name': 'Package', 'db_table': "'packages'"},
+ 'arch': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'packages'", 'to': "orm['main.Arch']"}),
+ 'build_date': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
+ 'compressed_size': ('main.models.PositiveBigIntegerField', [], {}),
+ 'epoch': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
+ 'filename': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
+ 'files_last_update': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
+ 'flag_date': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
+ 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'installed_size': ('main.models.PositiveBigIntegerField', [], {}),
+ 'last_update': ('django.db.models.fields.DateTimeField', [], {}),
+ 'packager': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True'}),
+ 'packager_str': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
+ 'pgp_signature': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
+ 'pkgbase': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
+ 'pkgdesc': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True'}),
+ 'pkgname': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
+ 'pkgrel': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
+ 'pkgver': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
+ 'repo': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'packages'", 'to': "orm['main.Repo']"}),
+ 'url': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True'})
+ },
+ 'main.repo': {
+ 'Meta': {'ordering': "['name']", 'object_name': 'Repo', 'db_table': "'repos'"},
+ 'bugs_category': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
+ 'bugs_project': ('django.db.models.fields.SmallIntegerField', [], {'default': '1'}),
+ 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
+ 'staging': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
+ 'svn_root': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
+ 'testing': ('django.db.models.fields.BooleanField', [], {'default': 'False'})
+ },
+ 'packages.conflict': {
+ 'Meta': {'ordering': "['name']", 'object_name': 'Conflict'},
+ 'comparison': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '255'}),
+ 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
+ 'pkg': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'conflicts'", 'to': "orm['main.Package']"}),
+ 'version': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '255'})
+ },
+ 'packages.license': {
+ 'Meta': {'ordering': "['name']", 'object_name': 'License'},
+ 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
+ 'pkg': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'licenses'", 'to': "orm['main.Package']"})
+ },
+ 'packages.packagegroup': {
+ 'Meta': {'object_name': 'PackageGroup'},
+ 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
+ 'pkg': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'groups'", 'to': "orm['main.Package']"})
+ },
+ 'packages.packagerelation': {
+ 'Meta': {'unique_together': "(('pkgbase', 'user', 'type'),)", 'object_name': 'PackageRelation'},
+ 'created': ('django.db.models.fields.DateTimeField', [], {}),
+ 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'pkgbase': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
+ 'type': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1'}),
+ 'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'package_relations'", 'to': "orm['auth.User']"})
+ },
+ 'packages.provision': {
+ 'Meta': {'ordering': "['name']", 'object_name': 'Provision'},
+ 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
+ 'pkg': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'provides'", 'to': "orm['main.Package']"}),
+ 'version': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '255'})
+ },
+ 'packages.replacement': {
+ 'Meta': {'ordering': "['name']", 'object_name': 'Replacement'},
+ 'comparison': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '255'}),
+ 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
+ 'pkg': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'replaces'", 'to': "orm['main.Package']"}),
+ 'version': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '255'})
+ },
+ 'packages.signoff': {
+ 'Meta': {'object_name': 'Signoff'},
+ 'arch': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['main.Arch']"}),
+ 'comments': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
+ 'created': ('django.db.models.fields.DateTimeField', [], {}),
+ 'epoch': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
+ 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'pkgbase': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
+ 'pkgrel': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
+ 'pkgver': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
+ 'repo': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['main.Repo']"}),
+ 'revoked': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
+ 'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'package_signoffs'", 'to': "orm['auth.User']"})
+ },
+ 'packages.signoffspecification': {
+ 'Meta': {'object_name': 'SignoffSpecification'},
+ 'arch': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['main.Arch']"}),
+ 'comments': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
+ 'created': ('django.db.models.fields.DateTimeField', [], {}),
+ 'enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
+ 'epoch': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
+ 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'known_bad': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
+ 'pkgbase': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
+ 'pkgrel': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
+ 'pkgver': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
+ 'repo': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['main.Repo']"}),
+ 'required': ('django.db.models.fields.PositiveIntegerField', [], {'default': '2'}),
+ 'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True'})
+ }
+ }
+
+ complete_apps = ['packages']
diff --git a/packages/models.py b/packages/models.py
index d2fe1878..0d02ab31 100644
--- a/packages/models.py
+++ b/packages/models.py
@@ -1,3 +1,5 @@
+from collections import namedtuple
+
from django.db import models
from django.db.models.signals import pre_save, post_save
from django.contrib.auth.models import User
@@ -38,6 +40,90 @@ class PackageRelation(models.Model):
class Meta:
unique_together = (('pkgbase', 'user', 'type'),)
+
+class SignoffSpecificationManager(models.Manager):
+ def get_from_package(self, pkg):
+ '''Utility method to pull all relevant name-version fields from a
+ package and get a matching signoff specification.'''
+ return self.get(
+ pkgbase=pkg.pkgbase, pkgver=pkg.pkgver, pkgrel=pkg.pkgrel,
+ epoch=pkg.epoch, arch=pkg.arch, repo=pkg.repo)
+
+ def get_or_default_from_package(self, pkg):
+ '''Utility method to pull all relevant name-version fields from a
+ package and get a matching signoff specification, or return the default
+ base case.'''
+ try:
+ return self.get(
+ pkgbase=pkg.pkgbase, pkgver=pkg.pkgver, pkgrel=pkg.pkgrel,
+ epoch=pkg.epoch, arch=pkg.arch, repo=pkg.repo)
+ except SignoffSpecification.DoesNotExist:
+ return DEFAULT_SIGNOFF_SPEC
+
+class SignoffSpecification(models.Model):
+ '''
+ A specification for the signoff policy for this particular revision of a
+ package. The default is to require two signoffs for a given package.
+ These objects are only created when necessary, e.g. to override the
+ required=2 attribute; otherwise a sane default object is used instead.
+ '''
+ pkgbase = models.CharField(max_length=255, db_index=True)
+ pkgver = models.CharField(max_length=255)
+ pkgrel = models.CharField(max_length=255)
+ epoch = models.PositiveIntegerField(default=0)
+ arch = models.ForeignKey('main.Arch')
+ repo = models.ForeignKey('main.Repo')
+ user = models.ForeignKey(User, null=True)
+ created = models.DateTimeField(editable=False)
+ required = models.PositiveIntegerField(default=2,
+ help_text="How many signoffs are required for this package?")
+ enabled = models.BooleanField(default=True,
+ help_text="Is this package eligible for signoffs?")
+ known_bad = models.BooleanField(default=False,
+ help_text="Is package is known to be broken in some way?")
+ comments = models.TextField(null=True, blank=True)
+
+ objects = SignoffSpecificationManager()
+
+ @property
+ def full_version(self):
+ if self.epoch > 0:
+ return u'%d:%s-%s' % (self.epoch, self.pkgver, self.pkgrel)
+ return u'%s-%s' % (self.pkgver, self.pkgrel)
+
+ def __unicode__(self):
+ return u'%s-%s' % (self.pkgbase, self.full_version)
+
+
+# fake default signoff spec when we don't have a persisted one in the database
+FakeSignoffSpecification = namedtuple('FakeSignoffSpecification',
+ ('required', 'enabled', 'known_bad', 'comments'))
+DEFAULT_SIGNOFF_SPEC = FakeSignoffSpecification(2, True, False, u'')
+
+
+class SignoffManager(models.Manager):
+ def get_from_package(self, pkg, user, revoked=False):
+ '''Utility method to pull all relevant name-version fields from a
+ package and get a matching signoff.'''
+ not_revoked = not revoked
+ return self.get(
+ pkgbase=pkg.pkgbase, pkgver=pkg.pkgver, pkgrel=pkg.pkgrel,
+ epoch=pkg.epoch, arch=pkg.arch, repo=pkg.repo,
+ revoked__isnull=not_revoked, user=user)
+
+ def get_or_create_from_package(self, pkg, user):
+ '''Utility method to pull all relevant name-version fields from a
+ package and get or create a matching signoff.'''
+ return self.get_or_create(
+ pkgbase=pkg.pkgbase, pkgver=pkg.pkgver, pkgrel=pkg.pkgrel,
+ epoch=pkg.epoch, arch=pkg.arch, repo=pkg.repo,
+ revoked=None, user=user)
+
+ def for_package(self, pkg):
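+ '''Return all signoffs, revoked or not, for the exact version of the
+ given package.'''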
+ return self.select_related('user').filter(
+ pkgbase=pkg.pkgbase, pkgver=pkg.pkgver, pkgrel=pkg.pkgrel,
+ epoch=pkg.epoch, arch=pkg.arch, repo=pkg.repo)
+
class Signoff(models.Model):
'''
A signoff for a package (by pkgbase) at a given point in time. These are
@@ -55,14 +141,14 @@ class Signoff(models.Model):
revoked = models.DateTimeField(null=True)
comments = models.TextField(null=True, blank=True)
- REQUIRED = 2
+ objects = SignoffManager()
@property
def packages(self):
# TODO: delayed import to avoid circular reference
from main.models import Package
return Package.objects.normal().filter(pkgbase=self.pkgbase,
- pkgver=self.pkgver, pkgrel=self.pkgrel, epoch=pkg.epoch,
+ pkgver=self.pkgver, pkgrel=self.pkgrel, epoch=self.epoch,
arch=self.arch, repo=self.repo)
@property
@@ -72,8 +158,11 @@ class Signoff(models.Model):
return u'%s-%s' % (self.pkgver, self.pkgrel)
def __unicode__(self):
- return u'%s-%s: %s' % (
- self.pkgbase, self.full_version, self.user)
+ revoked = u''
+ if self.revoked:
+ revoked = u' (revoked)'
+ return u'%s-%s: %s%s' % (
+ self.pkgbase, self.full_version, self.user, revoked)
class PackageGroup(models.Model):
'''
@@ -150,9 +239,8 @@ def remove_inactive_maintainers(sender, instance, created, **kwargs):
post_save.connect(remove_inactive_maintainers, sender=User,
dispatch_uid="packages.models")
-pre_save.connect(set_created_field, sender=PackageRelation,
- dispatch_uid="packages.models")
-pre_save.connect(set_created_field, sender=Signoff,
- dispatch_uid="packages.models")
+for sender in (PackageRelation, SignoffSpecification, Signoff):
+ pre_save.connect(set_created_field, sender=sender,
+ dispatch_uid="packages.models")
# vim: set ts=4 sw=4 et:
diff --git a/packages/templatetags/package_extras.py b/packages/templatetags/package_extras.py
index 45a534c8..66fb5e7c 100644
--- a/packages/templatetags/package_extras.py
+++ b/packages/templatetags/package_extras.py
@@ -1,4 +1,4 @@
-from urllib import urlencode, quote as urlquote
+from urllib import urlencode, quote as urlquote, unquote
try:
from urlparse import parse_qs
except ImportError:
@@ -13,6 +13,17 @@ def link_encode(url, query, doseq=False):
data = urlencode(query, doseq).replace('&', '&amp;')
return "%s?%s" % (url, data)
+@register.filter
+def url_unquote(original_url):
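+ '''Percent-decode a URL for display, returning the original value
+ unchanged if it cannot be decoded as UTF-8.'''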
+ try:
+ url = original_url
+ if isinstance(url, unicode):
+ url = url.encode('ascii')
+ url = unquote(url).decode('utf-8')
+ return url
+ except UnicodeError:
+ return original_url
+
class BuildQueryStringNode(template.Node):
def __init__(self, sortfield):
self.sortfield = sortfield
@@ -62,16 +73,38 @@ def userpkgs(user):
return ''
@register.simple_tag
+def get_download_link(package):
+ parts = {
+ "repo": package.repo.name.lower(),
+ "arch": package.arch.name,
+ "pkgfile": package.filename
+ }
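+ # arch-independent ("any") packages have no directory of their own on the
+ # mirrors; they are reachable via a concrete architecture's path instead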
+ if parts["arch"] == "any":
+ parts["arch"] = "i686"
+ linkbase = "//repo.parabolagnulinux.org/%(repo)s/os/%(arch)s/%(pkgfile)s"
+ return linkbase % parts
+
+@register.simple_tag
def get_wiki_link(package):
- url = "http://wiki.parabolagnulinux.org/Special:Search"
+ url = "//wiki.parabolagnulinux.org/index.php"
data = {
+ 'title': "Special:Search",
'search': package.pkgname,
}
return link_encode(url, data)
@register.simple_tag
+def svn_arch(package):
+ repo = package.repo.name.lower()
+ return svn_link(package, "repos/%s-%s" % (repo, package.arch.name))
+
+@register.simple_tag
+def svn_trunk(package):
+ return svn_link(package, "trunk")
+
+@register.simple_tag
def bugs_list(package):
- url = "https://bugs.parabolagnulinux.org/bugs/issue"
+ url = "//bugs.parabolagnulinux.org/bugs/issue"
data = {
'@action': 'search',
'title': package.pkgname,
@@ -80,7 +113,7 @@ def bugs_list(package):
@register.simple_tag
def bug_report(package):
- url = "https://bugs.parabolagnulinux.org/bugs/issue"
+ url = "//bugs.parabolagnulinux.org/bugs/issue"
data = {
'@template': 'item',
'keyword': 'packages',
@@ -98,5 +131,5 @@ def flag_unfree(package):
'priority': 'critical',
'title': '[%s] Please put your reasons here (register first if you haven\'t)' % package.pkgname,
}
- return link_encode(url, data)
+ return "//bugs.parabolagnulinux.org/bugs/issue?%s" % urlencode(data)
# vim: set ts=4 sw=4 et:
diff --git a/packages/urls.py b/packages/urls.py
index d7d01170..1f25e3fd 100644
--- a/packages/urls.py
+++ b/packages/urls.py
@@ -10,12 +10,15 @@ package_patterns = patterns('packages.views',
(r'^unflag/$', 'unflag'),
(r'^unflag/all/$', 'unflag_all'),
(r'^signoff/$', 'signoff_package'),
+ (r'^signoff/revoke/$', 'signoff_package', {'revoke': True}),
+ (r'^signoff/options/$', 'signoff_options'),
(r'^download/$', 'download'),
)
urlpatterns = patterns('packages.views',
(r'^flaghelp/$', 'flaghelp'),
(r'^signoffs/$', 'signoffs', {}, 'package-signoffs'),
+ (r'^signoffs/json/$', 'signoffs_json', {}, 'package-signoffs-json'),
(r'^update/$', 'update'),
(r'^$', 'search', {}, 'packages-search'),
diff --git a/packages/utils.py b/packages/utils.py
index c8c1f8a6..82d47bc7 100644
--- a/packages/utils.py
+++ b/packages/utils.py
@@ -2,11 +2,13 @@ from collections import defaultdict
from operator import itemgetter
from django.db import connection
-from django.db.models import Count, Max
+from django.db.models import Count, Max, F
+from django.contrib.auth.models import User
-from main.models import Package
-from main.utils import cache_function
-from .models import PackageGroup, PackageRelation, Signoff
+from main.models import Package, Arch, Repo
+from main.utils import cache_function, groupby_preserve_order, PackageStandin
+from .models import (PackageGroup, PackageRelation,
+ SignoffSpecification, Signoff, DEFAULT_SIGNOFF_SPEC)
@cache_function(300)
def get_group_info(include_arches=None):
@@ -47,6 +49,20 @@ def get_group_info(include_arches=None):
groups.extend(val.itervalues())
return sorted(groups, key=itemgetter('name', 'arch'))
+def get_split_packages_info():
+ '''Return info on split packages that do not have an actual package name
+ matching the split pkgbase.'''
+ pkgnames = Package.objects.values('pkgname')
+ split_pkgs = Package.objects.exclude(pkgname=F('pkgbase')).exclude(
+ pkgbase__in=pkgnames).values('pkgbase', 'repo', 'arch').annotate(
+ last_update=Max('last_update'))
+ all_arches = Arch.objects.in_bulk(set(s['arch'] for s in split_pkgs))
+ all_repos = Repo.objects.in_bulk(set(s['repo'] for s in split_pkgs))
+ for split in split_pkgs:
+ split['arch'] = all_arches[split['arch']]
+ split['repo'] = all_repos[split['repo']]
+ return split_pkgs
+
class Difference(object):
def __init__(self, pkgname, repo, pkg_a, pkg_b):
self.pkgname = pkgname
@@ -90,7 +106,11 @@ SELECT p.id, q.id
AND p.arch_id != q.arch_id
AND p.id != q.id
)
- WHERE p.arch_id IN (%s, %s)
+ WHERE p.arch_id IN (%s, %s)
+ AND (
+ q.arch_id IN (%s, %s)
+ OR q.id IS NULL
+ )
AND (
q.id IS NULL
OR p.pkgver != q.pkgver
@@ -99,7 +119,7 @@ SELECT p.id, q.id
)
"""
cursor = connection.cursor()
- cursor.execute(sql, [arch_a.id, arch_b.id])
+ cursor.execute(sql, [arch_a.id, arch_b.id, arch_a.id, arch_b.id])
results = cursor.fetchall()
# column A will always have a value, column B might be NULL
to_fetch = [row[0] for row in results]
@@ -126,6 +146,7 @@ SELECT p.id, q.id
differences.sort(key=lambda a: (a.repo.name, a.pkgname))
return differences
+
def get_wrong_permissions():
sql = """
SELECT DISTINCT id
@@ -148,11 +169,128 @@ SELECT DISTINCT id
id__in=to_fetch)
return relations
-def get_current_signoffs():
- '''Returns a mapping of pkgbase -> signoff objects.'''
- sql = """
+
+def attach_maintainers(packages):
+ '''Given a queryset (or anything resembling one) of package objects, find
+ all the maintainers and attach them to the packages to prevent N+1 query
+ cascading.'''
+ packages = list(packages)
+ pkgbases = set(p.pkgbase for p in packages)
+ rels = PackageRelation.objects.filter(type=PackageRelation.MAINTAINER,
+ pkgbase__in=pkgbases).values_list('pkgbase', 'user_id').distinct()
+
+ # get all the user objects we will need
+ user_ids = set(rel[1] for rel in rels)
+ users = User.objects.in_bulk(user_ids)
+
+ # now build a pkgbase -> [maintainers...] map
+ maintainers = defaultdict(list)
+ for rel in rels:
+ maintainers[rel[0]].append(users[rel[1]])
+
+ annotated = []
+ # and finally, attach the maintainer lists on the original packages
+ for package in packages:
+ package.maintainers = maintainers[package.pkgbase]
+ annotated.append(package)
+
+ return annotated
+
+
+def approved_by_signoffs(signoffs, spec):
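+ '''Return True if the number of un-revoked signoffs meets or exceeds the
+ count required by the given specification.'''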
+ if signoffs:
+ good_signoffs = sum(1 for s in signoffs if not s.revoked)
+ return good_signoffs >= spec.required
+ return False
+
+class PackageSignoffGroup(object):
+ '''Encompasses all packages in testing with the same pkgbase.'''
+ def __init__(self, packages):
+ if len(packages) == 0:
+ raise ValueError('cannot create an empty signoff group')
+ self.packages = packages
+ self.user = None
+ self.target_repo = None
+ self.signoffs = set()
+ self.specification = DEFAULT_SIGNOFF_SPEC
+ self.default_spec = True
+
+ first = packages[0]
+ self.pkgbase = first.pkgbase
+ self.arch = first.arch
+ self.repo = first.repo
+ self.version = ''
+ self.last_update = first.last_update
+ self.packager = first.packager
+ self.maintainers = first.maintainers
+
+ version = first.full_version
+ if all(version == pkg.full_version for pkg in packages):
+ self.version = version
+
+ @property
+ def package(self):
+ '''Try to return a relevant single package object representing this
+ group. Start by seeing if there is only one package, then look for the
+ matching package by name, finally falling back to a standin package
+ object.'''
+ if len(self.packages) == 1:
+ return self.packages[0]
+
+ same_pkgs = [p for p in self.packages if p.pkgname == p.pkgbase]
+ if same_pkgs:
+ return same_pkgs[0]
+
+ return PackageStandin(self.packages[0])
+
+ def find_signoffs(self, all_signoffs):
+ '''Look through a list of Signoff objects for ones matching this
+ particular group and store them on the object.'''
+ for s in all_signoffs:
+ if s.pkgbase != self.pkgbase:
+ continue
+ if self.version and s.full_version != self.version:
+ continue
+ if s.arch_id == self.arch.id and s.repo_id == self.repo.id:
+ self.signoffs.add(s)
+
+ def find_specification(self, specifications):
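+ '''Look through a list of SignoffSpecification objects for one matching
+ this particular group and store it on the object, replacing the default
+ specification.'''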
+ for spec in specifications:
+ if spec.pkgbase != self.pkgbase:
+ continue
+ if self.version and spec.full_version != self.version:
+ continue
+ if spec.arch_id == self.arch.id and spec.repo_id == self.repo.id:
+ self.specification = spec
+ self.default_spec = False
+ return
+
+ def approved(self):
+ return approved_by_signoffs(self.signoffs, self.specification)
+
+ @property
+ def completed(self):
+ return sum(1 for s in self.signoffs if not s.revoked)
+
+ @property
+ def required(self):
+ return self.specification.required
+
+ def user_signed_off(self, user=None):
+ '''Did a given user sign off on this package? The user can be passed as
+ an argument, or attached to the group object itself so this can be
+ called from a template.'''
+ if user is None:
+ user = self.user
+ return user in (s.user for s in self.signoffs if not s.revoked)
+
+ def __unicode__(self):
+ return u'%s-%s (%s): %d' % (
+ self.pkgbase, self.version, self.arch, len(self.signoffs))
+
+_SQL_SPEC_OR_SIGNOFF = """
SELECT DISTINCT s.id
- FROM packages_signoff s
+ FROM %s s
JOIN packages p ON (
s.pkgbase = p.pkgbase
AND s.pkgver = p.pkgver
@@ -161,15 +299,88 @@ SELECT DISTINCT s.id
AND s.arch_id = p.arch_id
AND s.repo_id = p.repo_id
)
- JOIN repos r ON p.repo_id = r.id
- WHERE r.testing = %s
+ AND p.repo_id IN (%s)
"""
+
+def get_current_signoffs(repos):
+ '''Returns a mapping of pkgbase -> signoff objects for the given repos.'''
cursor = connection.cursor()
- cursor.execute(sql, [True])
+ # query pre-processing: fill in the table name and the IN clause placeholders
+ sql = _SQL_SPEC_OR_SIGNOFF % ('packages_signoff',
+ ','.join(['%s' for r in repos]))
+ cursor.execute(sql, [r.pk for r in repos])
+
results = cursor.fetchall()
# fetch all of the returned signoffs by ID
to_fetch = [row[0] for row in results]
signoffs = Signoff.objects.select_related('user').in_bulk(to_fetch)
return signoffs.values()
+def get_current_specifications(repos):
+ '''Returns a mapping of pkgbase -> signoff specification objects for the
+ given repos.'''
+ cursor = connection.cursor()
+ sql = _SQL_SPEC_OR_SIGNOFF % ('packages_signoffspecification',
+ ','.join(['%s' for r in repos]))
+ cursor.execute(sql, [r.pk for r in repos])
+
+ results = cursor.fetchall()
+ to_fetch = [row[0] for row in results]
+ return SignoffSpecification.objects.in_bulk(to_fetch).values()
+
+def get_target_repo_map(repos):
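+ '''Map each pkgbase that appears in the given repos to the name of the
+ non-testing, non-staging repo also carrying a package with that pkgbase.'''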
+ sql = """
+SELECT DISTINCT p1.pkgbase, r.name
+ FROM packages p1
+ JOIN repos r ON p1.repo_id = r.id
+ JOIN packages p2 ON p1.pkgbase = p2.pkgbase
+ WHERE r.staging = %s
+ AND r.testing = %s
+ AND p2.repo_id IN (
+ """
+ sql += ','.join(['%s' for r in repos])
+ sql += ")"
+
+ params = [False, False]
+ params.extend(r.pk for r in repos)
+
+ cursor = connection.cursor()
+ cursor.execute(sql, params)
+ return dict(cursor.fetchall())
+
+def get_signoff_groups(repos=None, user=None):
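+ '''Build a PackageSignoffGroup for each (repo, arch, pkgbase) set of
+ packages in the given repos (all testing repos by default), attaching
+ maintainers, target repos, signoffs, and specifications along the way.'''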
+ if repos is None:
+ repos = Repo.objects.filter(testing=True)
+ repo_ids = [r.pk for r in repos]
+
+ test_pkgs = Package.objects.select_related(
+ 'arch', 'repo', 'packager').filter(repo__in=repo_ids)
+ packages = test_pkgs.order_by('pkgname')
+ packages = attach_maintainers(packages)
+
+ # Filter by user if asked to do so
+ if user is not None:
+ packages = [p for p in packages if user == p.packager
+ or user in p.maintainers]
+
+ # Collect all pkgbase values in testing repos
+ pkgtorepo = get_target_repo_map(repos)
+
+ # Collect all possible signoffs and specifications for these packages
+ signoffs = get_current_signoffs(repos)
+ specs = get_current_specifications(repos)
+
+ same_pkgbase_key = lambda x: (x.repo.name, x.arch.name, x.pkgbase)
+ grouped = groupby_preserve_order(packages, same_pkgbase_key)
+ signoff_groups = []
+ for group in grouped:
+ signoff_group = PackageSignoffGroup(group)
+ signoff_group.target_repo = pkgtorepo.get(signoff_group.pkgbase,
+ "Unknown")
+ signoff_group.find_signoffs(signoffs)
+ signoff_group.find_specification(specs)
+ signoff_groups.append(signoff_group)
+
+ return signoff_groups
+
# vim: set ts=4 sw=4 et:
diff --git a/packages/views.py b/packages/views.py
deleted file mode 100644
index f45c25d6..00000000
--- a/packages/views.py
+++ /dev/null
@@ -1,608 +0,0 @@
-from django import forms
-from django.contrib import messages
-from django.contrib.admin.widgets import AdminDateWidget
-from django.contrib.auth.models import User
-from django.contrib.auth.decorators import permission_required
-from django.conf import settings
-from django.core.mail import send_mail
-from django.core.serializers.json import DjangoJSONEncoder
-from django.db.models import Q
-from django.http import HttpResponse, Http404
-from django.shortcuts import get_object_or_404, get_list_or_404, redirect
-from django.template import loader, Context
-from django.utils import simplejson
-from django.views.decorators.cache import never_cache
-from django.views.decorators.http import require_POST
-from django.views.decorators.vary import vary_on_headers
-from django.views.generic import list_detail
-from django.views.generic.simple import direct_to_template
-
-from datetime import datetime
-from operator import attrgetter
-import string
-from urllib import urlencode
-
-from main.models import Package, PackageFile, Arch, Repo
-from main.utils import make_choice, groupby_preserve_order, PackageStandin
-from mirrors.models import MirrorUrl
-from .models import PackageRelation, PackageGroup, Signoff
-from .utils import (get_group_info, get_differences_info,
- get_wrong_permissions, get_current_signoffs)
-
-class PackageJSONEncoder(DjangoJSONEncoder):
- pkg_attributes = [ 'pkgname', 'pkgbase', 'repo', 'arch', 'pkgver',
- 'pkgrel', 'epoch', 'pkgdesc', 'url', 'filename', 'compressed_size',
- 'installed_size', 'build_date', 'last_update', 'flag_date' ]
-
- def default(self, obj):
- if hasattr(obj, '__iter__'):
- # mainly for queryset serialization
- return list(obj)
- if isinstance(obj, Package):
- data = dict((attr, getattr(obj, attr))
- for attr in self.pkg_attributes)
- data['groups'] = obj.groups.all()
- return data
- if isinstance(obj, PackageFile):
- filename = obj.filename or ''
- return obj.directory + filename
- if isinstance(obj, (Repo, Arch, PackageGroup)):
- return obj.name.lower()
- return super(PackageJSONEncoder, self).default(obj)
-
-def opensearch(request):
- if request.is_secure():
- domain = "https://%s" % request.META['HTTP_HOST']
- else:
- domain = "http://%s" % request.META['HTTP_HOST']
-
- return direct_to_template(request, 'packages/opensearch.xml',
- {'domain': domain},
- mimetype='application/opensearchdescription+xml')
-
-@permission_required('main.change_package')
-@require_POST
-def update(request):
- ids = request.POST.getlist('pkgid')
- count = 0
-
- if request.POST.has_key('adopt'):
- repos = request.user.userprofile.allowed_repos.all()
- pkgs = Package.objects.filter(id__in=ids, repo__in=repos)
- disallowed_pkgs = Package.objects.filter(id__in=ids).exclude(
- repo__in=repos)
-
- if disallowed_pkgs:
- messages.warning(request,
- "You do not have permission to adopt: %s." % (
- ' '.join([p.pkgname for p in disallowed_pkgs])
- ))
-
- for pkg in pkgs:
- if request.user not in pkg.maintainers:
- prel = PackageRelation(pkgbase=pkg.pkgbase,
- user=request.user,
- type=PackageRelation.MAINTAINER)
- count += 1
- prel.save()
-
- messages.info(request, "%d base packages adopted." % count)
-
- elif request.POST.has_key('disown'):
- # allow disowning regardless of allowed repos, helps things like
- # [community] -> [extra] moves
- for pkg in Package.objects.filter(id__in=ids):
- if request.user in pkg.maintainers:
- rels = PackageRelation.objects.filter(pkgbase=pkg.pkgbase,
- user=request.user,
- type=PackageRelation.MAINTAINER)
- count += rels.count()
- rels.delete()
-
- messages.info(request, "%d base packages disowned." % count)
-
- else:
- messages.error(request, "Are you trying to adopt or disown?")
- return redirect('/packages/')
-
-def details(request, name='', repo='', arch=''):
- if all([name, repo, arch]):
- try:
- pkg = Package.objects.select_related(
- 'arch', 'repo', 'packager').get(pkgname=name,
- repo__name__iexact=repo, arch__name=arch)
- return direct_to_template(request, 'packages/details.html',
- {'pkg': pkg, })
- except Package.DoesNotExist:
- arch = get_object_or_404(Arch, name=arch)
- arches = [ arch ]
- arches.extend(Arch.objects.filter(agnostic=True))
- repo = get_object_or_404(Repo, name__iexact=repo)
- pkgs = Package.objects.normal().filter(pkgbase=name,
- repo__testing=repo.testing, repo__staging=repo.staging,
- arch__in=arches).order_by('pkgname')
- if len(pkgs) == 0:
- raise Http404
- context = {
- 'list_title': 'Split Package Details',
- 'name': name,
- 'arch': arch,
- 'packages': pkgs,
- }
- return direct_to_template(request, 'packages/packages_list.html',
- context)
- else:
- pkg_data = [
- ('arch', arch.lower()),
- ('repo', repo.lower()),
- ('q', name),
- ]
- # only include non-blank values in the query we generate
- pkg_data = [(x, y) for x, y in pkg_data if y]
- return redirect("/packages/?%s" % urlencode(pkg_data))
-
-def groups(request, arch=None):
- arches = []
- if arch:
- get_object_or_404(Arch, name=arch, agnostic=False)
- arches.append(arch)
- grps = get_group_info(arches)
- context = {
- 'groups': grps,
- 'arch': arch,
- }
- return direct_to_template(request, 'packages/groups.html', context)
-
-def group_details(request, arch, name):
- arch = get_object_or_404(Arch, name=arch)
- arches = [ arch ]
- arches.extend(Arch.objects.filter(agnostic=True))
- pkgs = Package.objects.normal().filter(
- groups__name=name, arch__in=arches).order_by('pkgname')
- if len(pkgs) == 0:
- raise Http404
- context = {
- 'list_title': 'Group Details',
- 'name': name,
- 'arch': arch,
- 'packages': pkgs,
- }
- return direct_to_template(request, 'packages/packages_list.html', context)
-
-def coerce_limit_value(value):
- if not value:
- return None
- if value == 'all':
- # negative value indicates show all results
- return -1
- value = int(value)
- if value < 0:
- raise ValueError
- return value
-
-class LimitTypedChoiceField(forms.TypedChoiceField):
- def valid_value(self, value):
- try:
- coerce_limit_value(value)
- return True
- except (ValueError, TypeError):
- return False
-
-class PackageSearchForm(forms.Form):
- repo = forms.MultipleChoiceField(required=False)
- arch = forms.MultipleChoiceField(required=False)
- q = forms.CharField(required=False)
- maintainer = forms.ChoiceField(required=False)
- packager = forms.ChoiceField(required=False)
- last_update = forms.DateField(required=False, widget=AdminDateWidget(),
- label='Last Updated After')
- flagged = forms.ChoiceField(
- choices=[('', 'All')] + make_choice(['Flagged', 'Not Flagged']),
- required=False)
- limit = LimitTypedChoiceField(
- choices=make_choice([50, 100, 250]) + [('all', 'All')],
- coerce=coerce_limit_value,
- required=False,
- initial=50)
-
- def __init__(self, *args, **kwargs):
- super(PackageSearchForm, self).__init__(*args, **kwargs)
- self.fields['repo'].choices = make_choice(
- [repo.name for repo in Repo.objects.all()])
- self.fields['arch'].choices = make_choice(
- [arch.name for arch in Arch.objects.all()])
- self.fields['q'].widget.attrs.update({"size": "30"})
- maints = User.objects.filter(is_active=True).order_by('username')
- self.fields['maintainer'].choices = \
- [('', 'All'), ('orphan', 'Orphan')] + \
- [(m.username, m.get_full_name()) for m in maints]
- self.fields['packager'].choices = \
- [('', 'All'), ('unknown', 'Unknown')] + \
- [(m.username, m.get_full_name()) for m in maints]
-
-def search(request, page=None):
- limit = 50
- packages = Package.objects.normal()
-
- if request.GET:
- form = PackageSearchForm(data=request.GET)
- if form.is_valid():
- if form.cleaned_data['repo']:
- packages = packages.filter(
- repo__name__in=form.cleaned_data['repo'])
-
- if form.cleaned_data['arch']:
- packages = packages.filter(
- arch__name__in=form.cleaned_data['arch'])
-
- if form.cleaned_data['maintainer'] == 'orphan':
- inner_q = PackageRelation.objects.all().values('pkgbase')
- packages = packages.exclude(pkgbase__in=inner_q)
- elif form.cleaned_data['maintainer']:
- inner_q = PackageRelation.objects.filter(
- user__username=form.cleaned_data['maintainer']).values('pkgbase')
- packages = packages.filter(pkgbase__in=inner_q)
-
- if form.cleaned_data['packager'] == 'unknown':
- packages = packages.filter(packager__isnull=True)
- elif form.cleaned_data['packager']:
- packages = packages.filter(
- packager__username=form.cleaned_data['packager'])
-
- if form.cleaned_data['flagged'] == 'Flagged':
- packages = packages.filter(flag_date__isnull=False)
- elif form.cleaned_data['flagged'] == 'Not Flagged':
- packages = packages.filter(flag_date__isnull=True)
-
- if form.cleaned_data['q']:
- query = form.cleaned_data['q']
- q = Q(pkgname__icontains=query) | Q(pkgdesc__icontains=query)
- packages = packages.filter(q)
- if form.cleaned_data['last_update']:
- lu = form.cleaned_data['last_update']
- packages = packages.filter(last_update__gte=
- datetime(lu.year, lu.month, lu.day, 0, 0))
-
- asked_limit = form.cleaned_data['limit']
- if asked_limit and asked_limit < 0:
- limit = None
- elif asked_limit:
- limit = asked_limit
- else:
- # Form had errors, don't return any results, just the busted form
- packages = Package.objects.none()
- else:
- form = PackageSearchForm()
-
- current_query = request.GET.urlencode()
- page_dict = {
- 'search_form': form,
- 'current_query': current_query
- }
- allowed_sort = ["arch", "repo", "pkgname", "pkgbase",
- "compressed_size", "installed_size",
- "build_date", "last_update", "flag_date"]
- allowed_sort += ["-" + s for s in allowed_sort]
- sort = request.GET.get('sort', None)
- # TODO: sorting by multiple fields makes using a DB index much harder
- if sort in allowed_sort:
- packages = packages.order_by(
- request.GET['sort'], 'repo', 'arch', 'pkgname')
- page_dict['sort'] = sort
- else:
- packages = packages.order_by('pkgname')
-
- return list_detail.object_list(request, packages,
- template_name="packages/search.html",
- page=page,
- paginate_by=limit,
- template_object_name="package",
- extra_context=page_dict)
-
-@vary_on_headers('X-Requested-With')
-def files(request, name, repo, arch):
- pkg = get_object_or_404(Package,
- pkgname=name, repo__name__iexact=repo, arch__name=arch)
- fileslist = PackageFile.objects.filter(pkg=pkg).order_by('directory', 'filename')
- context = {
- 'pkg': pkg,
- 'files': fileslist,
- }
- template = 'packages/files.html'
- if request.is_ajax():
- template = 'packages/files-list.html'
- return direct_to_template(request, template, context)
-
-def details_json(request, name, repo, arch):
- pkg = get_object_or_404(Package,
- pkgname=name, repo__name__iexact=repo, arch__name=arch)
- to_json = simplejson.dumps(pkg, ensure_ascii=False,
- cls=PackageJSONEncoder)
- return HttpResponse(to_json, mimetype='application/json')
-
-def files_json(request, name, repo, arch):
- pkg = get_object_or_404(Package,
- pkgname=name, repo__name__iexact=repo, arch__name=arch)
- fileslist = PackageFile.objects.filter(pkg=pkg).order_by('directory', 'filename')
- data = {
- 'pkgname': pkg.pkgname,
- 'repo': pkg.repo.name.lower(),
- 'arch': pkg.arch.name.lower(),
- 'files': fileslist,
- }
- to_json = simplejson.dumps(data, ensure_ascii=False,
- cls=PackageJSONEncoder)
- return HttpResponse(to_json, mimetype='application/json')
-
-@permission_required('main.change_package')
-def unflag(request, name, repo, arch):
- pkg = get_object_or_404(Package,
- pkgname=name, repo__name__iexact=repo, arch__name=arch)
- pkg.flag_date = None
- pkg.save()
- return redirect(pkg)
-
-@permission_required('main.change_package')
-def unflag_all(request, name, repo, arch):
- pkg = get_object_or_404(Package,
- pkgname=name, repo__name__iexact=repo, arch__name=arch)
- # find all packages from (hopefully) the same PKGBUILD
- pkgs = Package.objects.filter(pkgbase=pkg.pkgbase,
- repo__testing=pkg.repo.testing, repo__staging=pkg.repo.staging)
- pkgs.update(flag_date=None)
- return redirect(pkg)
-
-class PackageSignoffGroup(object):
- '''Encompasses all packages in testing with the same pkgbase.'''
- def __init__(self, packages, target_repo=None, signoffs=None):
- if len(packages) == 0:
- raise Exception
- self.packages = packages
- self.target_repo = target_repo
- self.signoffs = signoffs
-
- first = packages[0]
- self.pkgbase = first.pkgbase
- self.arch = first.arch
- self.repo = first.repo
- self.version = ''
-
- version = first.full_version
- if all(version == pkg.full_version for pkg in packages):
- self.version = version
-
- @property
- def package(self):
- '''Try and return a relevant single package object representing this
- group. Start by seeing if there is only one package, then look for the
- matching package by name, finally falling back to a standin package
- object.'''
- if len(self.packages) == 1:
- return self.packages[0]
-
- same_pkgs = [p for p in self.packages if p.pkgname == p.pkgbase]
- if same_pkgs:
- return same_pkgs[0]
-
- return PackageStandin(self.packages[0])
-
- def find_signoffs(self, all_signoffs):
- '''Look through a list of Signoff objects for ones matching this
- particular group and store them on the object.'''
- if self.signoffs is None:
- self.signoffs = []
- for s in all_signoffs:
- if s.pkgbase != self.pkgbase:
- continue
- if self.version and not s.full_version == self.version:
- continue
- if s.arch_id == self.arch.id and s.repo_id == self.repo.id:
- self.signoffs.append(s)
-
- def approved(self):
- if self.signoffs:
- good_signoffs = [s for s in self.signoffs if not s.revoked]
- return len(good_signoffs) >= Signoff.REQUIRED
- return False
-
-@permission_required('main.change_package')
-@never_cache
-def signoffs(request):
- test_pkgs = Package.objects.normal().filter(repo__testing=True)
- packages = test_pkgs.order_by('pkgname')
-
- # Collect all pkgbase values in testing repos
- q_pkgbase = test_pkgs.values('pkgbase')
- package_repos = Package.objects.order_by().values_list(
- 'pkgbase', 'repo__name').filter(
- repo__testing=False, repo__staging=False,
- pkgbase__in=q_pkgbase).distinct()
- pkgtorepo = dict(package_repos)
-
- # Collect all existing signoffs for these packages
- signoffs = get_current_signoffs()
-
- same_pkgbase_key = lambda x: (x.repo.name, x.arch.name, x.pkgbase)
- grouped = groupby_preserve_order(packages, same_pkgbase_key)
- signoff_groups = []
- for group in grouped:
- signoff_group = PackageSignoffGroup(group)
- signoff_group.target_repo = pkgtorepo.get(signoff_group.pkgbase,
- "Unknown")
- signoff_group.find_signoffs(signoffs)
- signoff_groups.append(signoff_group)
-
- signoff_groups.sort(key=attrgetter('pkgbase'))
-
- return direct_to_template(request, 'packages/signoffs.html',
- {'signoff_groups': signoff_groups})
-
-@permission_required('main.change_package')
-@never_cache
-def signoff_package(request, name, repo, arch):
- packages = get_list_or_404(Package, pkgbase=name,
- arch__name=arch, repo__name__iexact=repo, repo__testing=True)
-
- pkg = packages[0]
- signoff, created = Signoff.objects.get_or_create(
- pkgbase=pkg.pkgbase, pkgver=pkg.pkgver, pkgrel=pkg.pkgrel,
- epoch=pkg.epoch, arch=pkg.arch, repo=pkg.repo, user=request.user)
-
- if request.is_ajax():
- data = {
- 'created': created,
- 'approved': pkg.approved_for_signoff(),
- 'user': str(request.user),
- }
- return HttpResponse(simplejson.dumps(data),
- mimetype='application/json')
-
- return redirect('package-signoffs')
-
-def flaghelp(request):
- return direct_to_template(request, 'packages/flaghelp.html')
-
-class FlagForm(forms.Form):
- email = forms.EmailField(label='* E-mail Address')
- usermessage = forms.CharField(label='Message To Dev',
- widget=forms.Textarea, required=False)
- # The field below is used to filter out bots that blindly fill out all input elements
- website = forms.CharField(label='',
- widget=forms.TextInput(attrs={'style': 'display:none;'}),
- required=False)
-
-@never_cache
-def flag(request, name, repo, arch):
- pkg = get_object_or_404(Package,
- pkgname=name, repo__name__iexact=repo, arch__name=arch)
- if pkg.flag_date is not None:
- # already flagged. do nothing.
- return direct_to_template(request, 'packages/flagged.html', {'pkg': pkg})
- # find all packages from (hopefully) the same PKGBUILD
- pkgs = Package.objects.normal().filter(
- pkgbase=pkg.pkgbase, flag_date__isnull=True,
- repo__testing=pkg.repo.testing,
- repo__staging=pkg.repo.staging).order_by(
- 'pkgname', 'repo__name', 'arch__name')
-
- if request.POST:
- form = FlagForm(request.POST)
- if form.is_valid() and form.cleaned_data['website'] == '':
- # save the package list for later use
- flagged_pkgs = list(pkgs)
- pkgs.update(flag_date=datetime.utcnow())
-
- maints = pkg.maintainers
- if not maints:
- toemail = settings.NOTIFICATIONS
- subject = 'Orphan %s package [%s] marked out-of-date' % \
- (pkg.repo.name, pkg.pkgname)
- else:
- toemail = []
- subject = '%s package [%s] marked out-of-date' % \
- (pkg.repo.name, pkg.pkgname)
- for maint in maints:
- if maint.get_profile().notify == True:
- toemail.append(maint.email)
-
- if toemail:
- # send notification email to the maintainers
- t = loader.get_template('packages/outofdate.txt')
- c = Context({
- 'email': form.cleaned_data['email'],
- 'message': form.cleaned_data['usermessage'],
- 'pkg': pkg,
- 'packages': flagged_pkgs,
- })
- send_mail(subject,
- t.render(c),
- 'Parabola Packages <packages@list.parabolagnulinux.org>',
- toemail,
- fail_silently=True)
-
- return redirect('package-flag-confirmed', name=name, repo=repo,
- arch=arch)
- else:
- form = FlagForm()
-
- context = {
- 'package': pkg,
- 'packages': pkgs,
- 'form': form
- }
- return direct_to_template(request, 'packages/flag.html', context)
-
-def flag_confirmed(request, name, repo, arch):
- pkg = get_object_or_404(Package,
- pkgname=name, repo__name__iexact=repo, arch__name=arch)
- pkgs = Package.objects.normal().filter(
- pkgbase=pkg.pkgbase, flag_date=pkg.flag_date,
- repo__testing=pkg.repo.testing,
- repo__staging=pkg.repo.staging).order_by(
- 'pkgname', 'repo__name', 'arch__name')
-
- context = {'package': pkg, 'packages': pkgs}
-
- return direct_to_template(request, 'packages/flag_confirmed.html', context)
-
-def download(request, name, repo, arch):
- pkg = get_object_or_404(Package,
- pkgname=name, repo__name__iexact=repo, arch__name=arch)
- mirrorurl = MirrorUrl.objects.filter(mirror__country='Any',
- mirror__public=True, mirror__active=True,
- protocol__protocol__iexact='HTTP')[0]
- arch = pkg.arch.name
- if pkg.arch.agnostic:
- # grab the first non-any arch to fake the download path
- arch = Arch.objects.exclude(agnostic=True)[0].name
- values = {
- 'host': mirrorurl.url,
- 'arch': arch,
- 'repo': pkg.repo.name.lower(),
- 'file': pkg.filename,
- }
- url = string.Template('${host}${repo}/os/${arch}/${file}').substitute(values)
- return redirect(url)
-
-def arch_differences(request):
- # TODO: we have some hardcoded magic here with respect to the arches.
- arch_a = Arch.objects.get(name='i686')
- arch_b = Arch.objects.get(name='x86_64')
- differences = get_differences_info(arch_a, arch_b)
- context = {
- 'arch_a': arch_a,
- 'arch_b': arch_b,
- 'differences': differences,
- }
- return direct_to_template(request, 'packages/differences.html', context)
-
-@permission_required('main.change_package')
-@never_cache
-def stale_relations(request):
- relations = PackageRelation.objects.select_related('user')
- pkgbases = Package.objects.all().values('pkgbase')
-
- inactive_user = relations.filter(user__is_active=False)
- missing_pkgbase = relations.exclude(
- pkgbase__in=pkgbases).order_by('pkgbase')
- wrong_permissions = get_wrong_permissions()
-
- context = {
- 'inactive_user': inactive_user,
- 'missing_pkgbase': missing_pkgbase,
- 'wrong_permissions': wrong_permissions,
- }
- return direct_to_template(request, 'packages/stale_relations.html', context)
-
-@permission_required('packages.delete_packagerelation')
-@require_POST
-def stale_relations_update(request):
- ids = set(request.POST.getlist('relation_id'))
-
- if ids:
- PackageRelation.objects.filter(id__in=ids).delete()
-
- messages.info(request, "%d package relations deleted." % len(ids))
- return redirect('/packages/stale_relations/')
-
-# vim: set ts=4 sw=4 et:
diff --git a/packages/views/__init__.py b/packages/views/__init__.py
new file mode 100644
index 00000000..bbfe7c9f
--- /dev/null
+++ b/packages/views/__init__.py
@@ -0,0 +1,269 @@
+from django.contrib import messages
+from django.contrib.auth.decorators import permission_required
+from django.core.serializers.json import DjangoJSONEncoder
+from django.http import HttpResponse, Http404
+from django.shortcuts import get_object_or_404, redirect
+from django.utils import simplejson
+from django.views.decorators.cache import never_cache
+from django.views.decorators.http import require_POST
+from django.views.decorators.vary import vary_on_headers
+from django.views.generic.simple import direct_to_template
+
+from string import Template
+from urllib import urlencode
+
+from main.models import Package, PackageFile, Arch, Repo
+from mirrors.models import MirrorUrl
+from ..models import PackageRelation, PackageGroup
+from ..utils import (get_group_info, get_differences_info,
+ get_wrong_permissions)
+
+# make other views available from this same package
+from .flag import flaghelp, flag, flag_confirmed, unflag, unflag_all
+from .search import search
+from .signoff import signoffs, signoff_package, signoff_options, signoffs_json
+
+
+class PackageJSONEncoder(DjangoJSONEncoder):
+ pkg_attributes = [ 'pkgname', 'pkgbase', 'repo', 'arch', 'pkgver',
+ 'pkgrel', 'epoch', 'pkgdesc', 'url', 'filename', 'compressed_size',
+ 'installed_size', 'build_date', 'last_update', 'flag_date' ]
+
+ def default(self, obj):
+ if hasattr(obj, '__iter__'):
+ # mainly for queryset serialization
+ return list(obj)
+ if isinstance(obj, Package):
+ data = dict((attr, getattr(obj, attr))
+ for attr in self.pkg_attributes)
+ data['groups'] = obj.groups.all()
+ return data
+ if isinstance(obj, PackageFile):
+ filename = obj.filename or ''
+ return obj.directory + filename
+ if isinstance(obj, (Repo, Arch, PackageGroup)):
+ return obj.name.lower()
+ return super(PackageJSONEncoder, self).default(obj)
+
+def opensearch(request):
+ if request.is_secure():
+ domain = "https://%s" % request.META['HTTP_HOST']
+ else:
+ domain = "http://%s" % request.META['HTTP_HOST']
+
+ return direct_to_template(request, 'packages/opensearch.xml',
+ {'domain': domain},
+ mimetype='application/opensearchdescription+xml')
+
+@permission_required('main.change_package')
+@require_POST
+def update(request):
+ ids = request.POST.getlist('pkgid')
+ count = 0
+
+ if 'adopt' in request.POST:
+ repos = request.user.userprofile.allowed_repos.all()
+ pkgs = Package.objects.filter(id__in=ids, repo__in=repos)
+ disallowed_pkgs = Package.objects.filter(id__in=ids).exclude(
+ repo__in=repos)
+
+ if disallowed_pkgs:
+ messages.warning(request,
+ "You do not have permission to adopt: %s." % (
+ ' '.join([p.pkgname for p in disallowed_pkgs])
+ ))
+
+ for pkg in pkgs:
+ if request.user not in pkg.maintainers:
+ prel = PackageRelation(pkgbase=pkg.pkgbase,
+ user=request.user,
+ type=PackageRelation.MAINTAINER)
+ count += 1
+ prel.save()
+
+ messages.info(request, "%d base packages adopted." % count)
+
+ elif 'disown' in request.POST:
+ # allow disowning regardless of allowed repos, helps things like
+ # [community] -> [extra] moves
+ for pkg in Package.objects.filter(id__in=ids):
+ if request.user in pkg.maintainers:
+ rels = PackageRelation.objects.filter(pkgbase=pkg.pkgbase,
+ user=request.user,
+ type=PackageRelation.MAINTAINER)
+ count += rels.count()
+ rels.delete()
+
+ messages.info(request, "%d base packages disowned." % count)
+
+ else:
+ messages.error(request, "Are you trying to adopt or disown?")
+ return redirect('/packages/')
+
+def details(request, name='', repo='', arch=''):
+ if all([name, repo, arch]):
+ try:
+ pkg = Package.objects.select_related(
+ 'arch', 'repo', 'packager').get(pkgname=name,
+ repo__name__iexact=repo, arch__name=arch)
+ return direct_to_template(request, 'packages/details.html',
+ {'pkg': pkg, })
+ except Package.DoesNotExist:
+ arch = get_object_or_404(Arch, name=arch)
+ arches = [ arch ]
+ arches.extend(Arch.objects.filter(agnostic=True))
+ repo = get_object_or_404(Repo, name__iexact=repo)
+ pkgs = Package.objects.normal().filter(pkgbase=name,
+ repo__testing=repo.testing, repo__staging=repo.staging,
+ arch__in=arches).order_by('pkgname')
+ if len(pkgs) == 0:
+ raise Http404
+ context = {
+ 'list_title': 'Split Package Details',
+ 'name': name,
+ 'arch': arch,
+ 'packages': pkgs,
+ }
+ return direct_to_template(request, 'packages/packages_list.html',
+ context)
+ else:
+ pkg_data = [
+ ('arch', arch.lower()),
+ ('repo', repo.lower()),
+ ('q', name),
+ ]
+ # only include non-blank values in the query we generate
+ pkg_data = [(x, y) for x, y in pkg_data if y]
+ return redirect("/packages/?%s" % urlencode(pkg_data))
+
+def groups(request, arch=None):
+ arches = []
+ if arch:
+ get_object_or_404(Arch, name=arch, agnostic=False)
+ arches.append(arch)
+ grps = get_group_info(arches)
+ context = {
+ 'groups': grps,
+ 'arch': arch,
+ }
+ return direct_to_template(request, 'packages/groups.html', context)
+
+def group_details(request, arch, name):
+ arch = get_object_or_404(Arch, name=arch)
+ arches = [ arch ]
+ arches.extend(Arch.objects.filter(agnostic=True))
+ pkgs = Package.objects.normal().filter(
+ groups__name=name, arch__in=arches).order_by('pkgname')
+ if len(pkgs) == 0:
+ raise Http404
+ context = {
+ 'list_title': 'Group Details',
+ 'name': name,
+ 'arch': arch,
+ 'packages': pkgs,
+ }
+ return direct_to_template(request, 'packages/packages_list.html', context)
+
+@vary_on_headers('X-Requested-With')
+def files(request, name, repo, arch):
+ pkg = get_object_or_404(Package,
+ pkgname=name, repo__name__iexact=repo, arch__name=arch)
+ fileslist = PackageFile.objects.filter(pkg=pkg).order_by('directory', 'filename')
+ context = {
+ 'pkg': pkg,
+ 'files': fileslist,
+ }
+ template = 'packages/files.html'
+ if request.is_ajax():
+ template = 'packages/files-list.html'
+ return direct_to_template(request, template, context)
+
+def details_json(request, name, repo, arch):
+ pkg = get_object_or_404(Package,
+ pkgname=name, repo__name__iexact=repo, arch__name=arch)
+ to_json = simplejson.dumps(pkg, ensure_ascii=False,
+ cls=PackageJSONEncoder)
+ return HttpResponse(to_json, mimetype='application/json')
+
+def files_json(request, name, repo, arch):
+ pkg = get_object_or_404(Package,
+ pkgname=name, repo__name__iexact=repo, arch__name=arch)
+ fileslist = PackageFile.objects.filter(pkg=pkg).order_by('directory', 'filename')
+ data = {
+ 'pkgname': pkg.pkgname,
+ 'repo': pkg.repo.name.lower(),
+ 'arch': pkg.arch.name.lower(),
+ 'files': fileslist,
+ }
+ to_json = simplejson.dumps(data, ensure_ascii=False,
+ cls=PackageJSONEncoder)
+ return HttpResponse(to_json, mimetype='application/json')
+
+def download(request, name, repo, arch):
+ pkg = get_object_or_404(Package,
+ pkgname=name, repo__name__iexact=repo, arch__name=arch)
+ mirror_urls = MirrorUrl.objects.filter(
+ mirror__public=True, mirror__active=True,
+ protocol__protocol__iexact='HTTP')
+ # look first for an 'Any' URL, then fall back to any HTTP URL
+ filtered_urls = mirror_urls.filter(mirror__country='Any')[:1]
+ if not filtered_urls:
+ filtered_urls = mirror_urls[:1]
+ if not filtered_urls:
+ raise Http404
+ arch = pkg.arch.name
+ if pkg.arch.agnostic:
+ # grab the first non-any arch to fake the download path
+ arch = Arch.objects.exclude(agnostic=True)[0].name
+ values = {
+ 'host': filtered_urls[0].url,
+ 'arch': arch,
+ 'repo': pkg.repo.name.lower(),
+ 'file': pkg.filename,
+ }
+ url = Template('${host}${repo}/os/${arch}/${file}').substitute(values)
+ return redirect(url)
+
+def arch_differences(request):
+ # TODO: we have some hardcoded magic here with respect to the arches.
+ arch_a = Arch.objects.get(name=request.GET.get('arch_a', 'i686'))
+ arch_b = Arch.objects.get(name=request.GET.get('arch_b', 'x86_64'))
+ differences = get_differences_info(arch_a, arch_b)
+ context = {
+ 'arch_a': arch_a,
+ 'arch_b': arch_b,
+ 'differences': differences,
+ 'arches': Arch.objects.filter(agnostic=False)
+ }
+ return direct_to_template(request, 'packages/differences.html', context)
+
+@permission_required('main.change_package')
+@never_cache
+def stale_relations(request):
+ relations = PackageRelation.objects.select_related('user')
+ pkgbases = Package.objects.all().values('pkgbase')
+
+ inactive_user = relations.filter(user__is_active=False)
+ missing_pkgbase = relations.exclude(
+ pkgbase__in=pkgbases).order_by('pkgbase')
+ wrong_permissions = get_wrong_permissions()
+
+ context = {
+ 'inactive_user': inactive_user,
+ 'missing_pkgbase': missing_pkgbase,
+ 'wrong_permissions': wrong_permissions,
+ }
+ return direct_to_template(request, 'packages/stale_relations.html', context)
+
+@permission_required('packages.delete_packagerelation')
+@require_POST
+def stale_relations_update(request):
+ ids = set(request.POST.getlist('relation_id'))
+
+ if ids:
+ PackageRelation.objects.filter(id__in=ids).delete()
+
+ messages.info(request, "%d package relations deleted." % len(ids))
+ return redirect('/packages/stale_relations/')
+
+# vim: set ts=4 sw=4 et:
diff --git a/packages/views/flag.py b/packages/views/flag.py
new file mode 100644
index 00000000..5db2ea69
--- /dev/null
+++ b/packages/views/flag.py
@@ -0,0 +1,121 @@
+from datetime import datetime
+
+from django import forms
+from django.conf import settings
+from django.contrib.auth.decorators import permission_required
+from django.core.mail import send_mail
+from django.shortcuts import get_object_or_404, redirect
+from django.template import loader, Context
+from django.views.generic.simple import direct_to_template
+from django.views.decorators.cache import never_cache
+
+from main.models import Package
+
+
+def flaghelp(request):
+ return direct_to_template(request, 'packages/flaghelp.html')
+
+class FlagForm(forms.Form):
+ email = forms.EmailField(label='* E-mail Address')
+ usermessage = forms.CharField(label='Message To Dev',
+ widget=forms.Textarea, required=False)
+ # The field below is used to filter out bots that blindly fill out all
+ # input elements
+ website = forms.CharField(label='',
+ widget=forms.TextInput(attrs={'style': 'display:none;'}),
+ required=False)
+
+@never_cache
+def flag(request, name, repo, arch):
+ pkg = get_object_or_404(Package,
+ pkgname=name, repo__name__iexact=repo, arch__name=arch)
+ if pkg.flag_date is not None:
+ # already flagged. do nothing.
+ return direct_to_template(request, 'packages/flagged.html',
+ {'pkg': pkg})
+ # find all packages from (hopefully) the same PKGBUILD
+ pkgs = Package.objects.normal().filter(
+ pkgbase=pkg.pkgbase, flag_date__isnull=True,
+ repo__testing=pkg.repo.testing,
+ repo__staging=pkg.repo.staging).order_by(
+ 'pkgname', 'repo__name', 'arch__name')
+
+ if request.POST:
+ form = FlagForm(request.POST)
+ if form.is_valid() and form.cleaned_data['website'] == '':
+ # save the package list for later use
+ flagged_pkgs = list(pkgs)
+ pkgs.update(flag_date=datetime.utcnow())
+
+ maints = pkg.maintainers
+ if not maints:
+ toemail = settings.NOTIFICATIONS
+ subject = 'Orphan %s package [%s] marked out-of-date' % \
+ (pkg.repo.name, pkg.pkgname)
+ else:
+ toemail = []
+ subject = '%s package [%s] marked out-of-date' % \
+ (pkg.repo.name, pkg.pkgname)
+ for maint in maints:
+ if maint.get_profile().notify:
+ toemail.append(maint.email)
+
+ if toemail:
+ # send notification email to the maintainers
+ tmpl = loader.get_template('packages/outofdate.txt')
+ ctx = Context({
+ 'email': form.cleaned_data['email'],
+ 'message': form.cleaned_data['usermessage'],
+ 'pkg': pkg,
+ 'packages': flagged_pkgs,
+ })
+ send_mail(subject,
+ tmpl.render(ctx),
+ 'Parabola Website Notification <nobody@parabolagnulinux.org>',
+ toemail,
+ fail_silently=True)
+
+ return redirect('package-flag-confirmed', name=name, repo=repo,
+ arch=arch)
+ else:
+ form = FlagForm()
+
+ context = {
+ 'package': pkg,
+ 'packages': pkgs,
+ 'form': form
+ }
+ return direct_to_template(request, 'packages/flag.html', context)
+
+def flag_confirmed(request, name, repo, arch):
+ pkg = get_object_or_404(Package,
+ pkgname=name, repo__name__iexact=repo, arch__name=arch)
+ pkgs = Package.objects.normal().filter(
+ pkgbase=pkg.pkgbase, flag_date=pkg.flag_date,
+ repo__testing=pkg.repo.testing,
+ repo__staging=pkg.repo.staging).order_by(
+ 'pkgname', 'repo__name', 'arch__name')
+
+ context = {'package': pkg, 'packages': pkgs}
+
+ return direct_to_template(request, 'packages/flag_confirmed.html', context)
+
+@permission_required('main.change_package')
+def unflag(request, name, repo, arch):
+ pkg = get_object_or_404(Package,
+ pkgname=name, repo__name__iexact=repo, arch__name=arch)
+ pkg.flag_date = None
+ pkg.save()
+ return redirect(pkg)
+
+@permission_required('main.change_package')
+def unflag_all(request, name, repo, arch):
+ pkg = get_object_or_404(Package,
+ pkgname=name, repo__name__iexact=repo, arch__name=arch)
+ # find all packages from (hopefully) the same PKGBUILD
+ pkgs = Package.objects.filter(pkgbase=pkg.pkgbase,
+ repo__testing=pkg.repo.testing, repo__staging=pkg.repo.staging)
+ pkgs.update(flag_date=None)
+ return redirect(pkg)
+
+# vim: set ts=4 sw=4 et:
diff --git a/packages/views/search.py b/packages/views/search.py
new file mode 100644
index 00000000..65fcddb3
--- /dev/null
+++ b/packages/views/search.py
@@ -0,0 +1,168 @@
+from datetime import datetime
+
+from django import forms
+from django.contrib.admin.widgets import AdminDateWidget
+from django.contrib.auth.models import User
+from django.db.models import Q
+from django.views.generic import list_detail
+
+from main.models import Package, Arch, Repo
+from main.utils import make_choice
+from ..models import PackageRelation
+
+
+def coerce_limit_value(value):
+ if not value:
+ return None
+ if value == 'all':
+ # negative value indicates show all results
+ return -1
+ value = int(value)
+ if value < 0:
+ raise ValueError
+ return value
+
+class LimitTypedChoiceField(forms.TypedChoiceField):
+ def valid_value(self, value):
+ try:
+ coerce_limit_value(value)
+ return True
+ except (ValueError, TypeError):
+ return False
+
+class PackageSearchForm(forms.Form):
+ repo = forms.MultipleChoiceField(required=False)
+ arch = forms.MultipleChoiceField(required=False)
+ name = forms.CharField(required=False)
+ desc = forms.CharField(required=False)
+ q = forms.CharField(required=False)
+ sort = forms.CharField(required=False)
+ maintainer = forms.ChoiceField(required=False)
+ packager = forms.ChoiceField(required=False)
+ last_update = forms.DateField(required=False, widget=AdminDateWidget(),
+ label='Last Updated After')
+ flagged = forms.ChoiceField(
+ choices=[('', 'All')] + make_choice(['Flagged', 'Not Flagged']),
+ required=False)
+ signed = forms.ChoiceField(
+ choices=[('', 'All')] + make_choice(['Signed', 'Unsigned']),
+ required=False)
+ limit = LimitTypedChoiceField(
+ choices=make_choice([50, 100, 250]) + [('all', 'All')],
+ coerce=coerce_limit_value,
+ required=False,
+ initial=50)
+
+ def __init__(self, *args, **kwargs):
+ super(PackageSearchForm, self).__init__(*args, **kwargs)
+ self.fields['repo'].choices = make_choice(
+ [repo.name for repo in Repo.objects.all()])
+ self.fields['arch'].choices = make_choice(
+ [arch.name for arch in Arch.objects.all()])
+ self.fields['q'].widget.attrs.update({"size": "30"})
+ maints = User.objects.filter(is_active=True).order_by(
+ 'first_name', 'last_name')
+ self.fields['maintainer'].choices = \
+ [('', 'All'), ('orphan', 'Orphan')] + \
+ [(m.username, m.get_full_name()) for m in maints]
+ self.fields['packager'].choices = \
+ [('', 'All'), ('unknown', 'Unknown')] + \
+ [(m.username, m.get_full_name()) for m in maints]
+
+def parse_form(form, packages):
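+ '''Apply the cleaned data from a validated search form as filters on the
+ given packages queryset.'''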
+ if form.cleaned_data['repo']:
+ packages = packages.filter(
+ repo__name__in=form.cleaned_data['repo'])
+
+ if form.cleaned_data['arch']:
+ packages = packages.filter(
+ arch__name__in=form.cleaned_data['arch'])
+
+ if form.cleaned_data['maintainer'] == 'orphan':
+ inner_q = PackageRelation.objects.all().values('pkgbase')
+ packages = packages.exclude(pkgbase__in=inner_q)
+ elif form.cleaned_data['maintainer']:
+ inner_q = PackageRelation.objects.filter(
+ user__username=form.cleaned_data['maintainer']).values('pkgbase')
+ packages = packages.filter(pkgbase__in=inner_q)
+
+ if form.cleaned_data['packager'] == 'unknown':
+ packages = packages.filter(packager__isnull=True)
+ elif form.cleaned_data['packager']:
+ packages = packages.filter(
+ packager__username=form.cleaned_data['packager'])
+
+ if form.cleaned_data['flagged'] == 'Flagged':
+ packages = packages.filter(flag_date__isnull=False)
+ elif form.cleaned_data['flagged'] == 'Not Flagged':
+ packages = packages.filter(flag_date__isnull=True)
+
+ if form.cleaned_data['signed'] == 'Signed':
+ packages = packages.filter(pgp_signature__isnull=False)
+ elif form.cleaned_data['signed'] == 'Unsigned':
+ packages = packages.filter(pgp_signature__isnull=True)
+
+ if form.cleaned_data['last_update']:
+ lu = form.cleaned_data['last_update']
+ packages = packages.filter(last_update__gte=
+ datetime(lu.year, lu.month, lu.day, 0, 0))
+
+ if form.cleaned_data['name']:
+ name = form.cleaned_data['name']
+ packages = packages.filter(pkgname__icontains=name)
+
+ if form.cleaned_data['desc']:
+ desc = form.cleaned_data['desc']
+ packages = packages.filter(pkgdesc__icontains=desc)
+
+ if form.cleaned_data['q']:
+ query = form.cleaned_data['q']
+ q = Q(pkgname__icontains=query) | Q(pkgdesc__icontains=query)
+ packages = packages.filter(q)
+
+ return packages
+
+def search(request, page=None):
+ limit = 50
+ sort = None
+ packages = Package.objects.normal()
+
+ if request.GET:
+ form = PackageSearchForm(data=request.GET)
+ if form.is_valid():
+ packages = parse_form(form, packages)
+ asked_limit = form.cleaned_data['limit']
+ if asked_limit and asked_limit < 0:
+ limit = None
+ elif asked_limit:
+ limit = asked_limit
+ sort = form.cleaned_data['sort']
+ else:
+ # Form had errors, don't return any results, just the busted form
+ packages = Package.objects.none()
+ else:
+ form = PackageSearchForm()
+
+ current_query = request.GET.urlencode()
+ page_dict = {
+ 'search_form': form,
+ 'current_query': current_query
+ }
+ allowed_sort = ["arch", "repo", "pkgname", "pkgbase",
+ "compressed_size", "installed_size",
+ "build_date", "last_update", "flag_date"]
+ allowed_sort += ["-" + s for s in allowed_sort]
+ if sort in allowed_sort:
+ packages = packages.order_by(sort)
+ page_dict['sort'] = sort
+ else:
+ packages = packages.order_by('pkgname')
+
+ return list_detail.object_list(request, packages,
+ template_name="packages/search.html",
+ page=page,
+ paginate_by=limit,
+ template_object_name="package",
+ extra_context=page_dict)
+
+# vim: set ts=4 sw=4 et:
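
parse_form() above narrows the queryset one criterion at a time; because Django
QuerySets are lazy, no SQL is issued until the generic view paginates the
result. A minimal sketch of how the pieces fit together, with
coerce_limit_value written out as an assumption (its real body lives elsewhere
in the tree and is not part of this hunk):

    def coerce_limit_value(value):
        # assumed behaviour: '' -> None, 'all' -> negative sentinel,
        # anything else -> plain int
        if not value:
            return None
        if value == 'all':
            return -1
        return int(value)

    form = PackageSearchForm(data={'q': 'linux', 'limit': 'all'})
    if form.is_valid():
        results = parse_form(form, Package.objects.normal())
        # 'results' is still an unevaluated QuerySet here; search()
        # maps the negative limit to paginate_by=None (show all)
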
diff --git a/packages/views/signoff.py b/packages/views/signoff.py
new file mode 100644
index 00000000..e57b4d9a
--- /dev/null
+++ b/packages/views/signoff.py
@@ -0,0 +1,193 @@
+from datetime import datetime
+from operator import attrgetter
+
+from django import forms
+from django.contrib.auth.decorators import permission_required
+from django.contrib.auth.models import User
+from django.core.serializers.json import DjangoJSONEncoder
+from django.db import transaction
+from django.http import HttpResponse, Http404
+from django.shortcuts import get_list_or_404, redirect, render
+from django.utils import simplejson
+from django.views.decorators.cache import never_cache
+from django.views.generic.simple import direct_to_template
+
+from main.models import Package, Arch, Repo
+from ..models import SignoffSpecification, Signoff
+from ..utils import (get_signoff_groups, approved_by_signoffs,
+ PackageSignoffGroup)
+
+@permission_required('main.change_package')
+@never_cache
+def signoffs(request):
+ signoff_groups = sorted(get_signoff_groups(), key=attrgetter('pkgbase'))
+ for group in signoff_groups:
+ group.user = request.user
+
+ context = {
+ 'signoff_groups': signoff_groups,
+ 'arches': Arch.objects.all(),
+ 'repo_names': sorted(set(g.target_repo for g in signoff_groups)),
+ }
+ return direct_to_template(request, 'packages/signoffs.html', context)
+
+@permission_required('main.change_package')
+@never_cache
+def signoff_package(request, name, repo, arch, revoke=False):
+ packages = get_list_or_404(Package, pkgbase=name,
+ arch__name=arch, repo__name__iexact=repo, repo__testing=True)
+ package = packages[0]
+
+ spec = SignoffSpecification.objects.get_or_default_from_package(package)
+
+ if revoke:
+ try:
+ signoff = Signoff.objects.get_from_package(
+ package, request.user, False)
+ except Signoff.DoesNotExist:
+ raise Http404
+ signoff.revoked = datetime.utcnow()
+ signoff.save()
+ created = False
+ else:
+ # ensure we should even be accepting signoffs
+ if spec.known_bad or not spec.enabled:
+ return render(request, '403.html', status=403)
+ signoff, created = Signoff.objects.get_or_create_from_package(
+ package, request.user)
+
+ all_signoffs = Signoff.objects.for_package(package)
+
+ if request.is_ajax():
+ data = {
+ 'created': created,
+ 'revoked': bool(signoff.revoked),
+ 'approved': approved_by_signoffs(all_signoffs, spec),
+ 'required': spec.required,
+ 'enabled': spec.enabled,
+ 'known_bad': spec.known_bad,
+ 'user': str(request.user),
+ }
+ return HttpResponse(simplejson.dumps(data, ensure_ascii=False),
+ mimetype='application/json')
+
+ return redirect('package-signoffs')
+
+class SignoffOptionsForm(forms.ModelForm):
+ apply_all = forms.BooleanField(required=False,
+ help_text="Apply these options to all architectures?")
+
+ class Meta:
+ model = SignoffSpecification
+ fields = ('required', 'enabled', 'known_bad', 'comments')
+
+def _signoff_options_all(request, name, repo):
+ seen_ids = set()
+ with transaction.commit_on_success():
+ # find or create a specification for all architectures, then
+ # graft the form data onto them
+ packages = Package.objects.filter(pkgbase=name,
+ repo__name__iexact=repo, repo__testing=True)
+ for package in packages:
+ try:
+ spec = SignoffSpecification.objects.get_from_package(package)
+ if spec.pk in seen_ids:
+ continue
+ except SignoffSpecification.DoesNotExist:
+ spec = SignoffSpecification(pkgbase=package.pkgbase,
+ pkgver=package.pkgver, pkgrel=package.pkgrel,
+ epoch=package.epoch, arch=package.arch,
+ repo=package.repo)
+
+ if spec.user is None:
+ spec.user = request.user
+
+ form = SignoffOptionsForm(request.POST, instance=spec)
+ if form.is_valid():
+ form.save()
+ seen_ids.add(form.instance.pk)
+
+@permission_required('main.change_package')
+@never_cache
+def signoff_options(request, name, repo, arch):
+ packages = get_list_or_404(Package, pkgbase=name,
+ arch__name=arch, repo__name__iexact=repo, repo__testing=True)
+ package = packages[0]
+
+ if request.user != package.packager and \
+ request.user not in package.maintainers:
+ return render(request, '403.html', status=403)
+
+ try:
+ spec = SignoffSpecification.objects.get_from_package(package)
+ except SignoffSpecification.DoesNotExist:
+ # create a fake one, but don't save it just yet
+ spec = SignoffSpecification(pkgbase=package.pkgbase,
+ pkgver=package.pkgver, pkgrel=package.pkgrel,
+ epoch=package.epoch, arch=package.arch, repo=package.repo)
+
+ if spec.user is None:
+ spec.user = request.user
+
+ if request.POST:
+ form = SignoffOptionsForm(request.POST, instance=spec)
+ if form.is_valid():
+ if form.cleaned_data['apply_all']:
+ _signoff_options_all(request, name, repo)
+ else:
+ form.save()
+ return redirect('package-signoffs')
+ else:
+ form = SignoffOptionsForm(instance=spec)
+
+ context = {
+ 'packages': packages,
+ 'package': package,
+ 'form': form,
+ }
+ return direct_to_template(request, 'packages/signoff_options.html', context)
+
+class SignoffJSONEncoder(DjangoJSONEncoder):
+    '''JSONEncoder subclass extended to handle serialization of all classes
+    related to signoffs.'''
+ signoff_group_attrs = ['arch', 'last_update', 'maintainers', 'packager',
+ 'pkgbase', 'repo', 'signoffs', 'target_repo', 'version']
+ signoff_spec_attrs = ['required', 'enabled', 'known_bad', 'comments']
+ signoff_attrs = ['user', 'created', 'revoked']
+
+ def default(self, obj):
+ if isinstance(obj, PackageSignoffGroup):
+ data = dict((attr, getattr(obj, attr))
+ for attr in self.signoff_group_attrs)
+ data['pkgnames'] = [p.pkgname for p in obj.packages]
+ data['package_count'] = len(obj.packages)
+ data['approved'] = obj.approved()
+ data.update((attr, getattr(obj.specification, attr))
+ for attr in self.signoff_spec_attrs)
+ return data
+ elif isinstance(obj, Signoff):
+ data = dict((attr, getattr(obj, attr))
+ for attr in self.signoff_attrs)
+ return data
+ elif isinstance(obj, Arch) or isinstance(obj, Repo):
+ return unicode(obj)
+ elif isinstance(obj, User):
+ return obj.username
+ elif isinstance(obj, set):
+ return list(obj)
+ return super(SignoffJSONEncoder, self).default(obj)
+
+@permission_required('main.change_package')
+@never_cache
+def signoffs_json(request):
+ signoff_groups = sorted(get_signoff_groups(), key=attrgetter('pkgbase'))
+ data = {
+ 'version': 1,
+ 'signoff_groups': signoff_groups,
+ }
+ to_json = simplejson.dumps(data, ensure_ascii=False,
+ cls=SignoffJSONEncoder)
+ response = HttpResponse(to_json, mimetype='application/json')
+ return response
+
+# vim: set ts=4 sw=4 et:
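
signoffs_json() above leaves the flattening of groups, specifications and
signoffs entirely to SignoffJSONEncoder. A small usage sketch (payload values
are illustrative only):

    # Python 2 / Django 1.3 era, matching the code above
    encoder = SignoffJSONEncoder()
    payload = {'version': 1, 'arches': set([u'i686', u'x86_64'])}
    print encoder.encode(payload)
    # Arch and Repo objects collapse to their unicode names, Users to
    # usernames, sets to lists; anything else (dates, decimals) falls
    # through to DjangoJSONEncoder
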
diff --git a/public/utils.py b/public/utils.py
index 5900c674..30c76ac1 100644
--- a/public/utils.py
+++ b/public/utils.py
@@ -1,6 +1,6 @@
from operator import attrgetter
-from main.models import Arch, Package, Repo
+from main.models import Arch, Package
from main.utils import cache_function, groupby_preserve_order, PackageStandin
class RecentUpdate(object):
diff --git a/public/views.py b/public/views.py
index c10be35f..af46e343 100644
--- a/public/views.py
+++ b/public/views.py
@@ -1,19 +1,18 @@
-from main.models import Arch, Repo, Donor
-from mirrors.models import MirrorUrl
-from news.models import News
-from . import utils
-
from django.conf import settings
from django.contrib.auth.models import User
-from django.db.models import Q
from django.http import Http404
+from django.shortcuts import redirect
from django.views.generic import list_detail
from django.views.generic.simple import direct_to_template
-from django.shortcuts import redirect
+from devel.models import MasterKey
+from main.models import Arch, Repo, Donor
+from mirrors.models import MirrorUrl
+from news.models import News
+from utils import get_recent_updates
def index(request):
- pkgs = utils.get_recent_updates()
+ pkgs = get_recent_updates()
context = {
'news_updates': News.objects.order_by('-postdate', '-id')[:15],
'pkg_updates': pkgs,
@@ -31,16 +30,18 @@ USER_LISTS = {
},
}
-def userlist(request, type='hackers'):
- users = User.objects.order_by('username').select_related('userprofile')
- if type == 'hackers':
+def userlist(request, user_type='hackers'):
+ users = User.objects.order_by(
+ 'username').select_related('userprofile')
+ if user_type == 'hackers':
users = users.filter(is_active=True, groups__name="Hackers")
- elif type == 'fellows':
- users = users.filter(is_active=False, groups__name__in=["Hackers"])
+ elif user_type == 'fellows':
+ users = users.filter(is_active=False,
+ groups__name__in=["Hackers"])
else:
raise Http404
- context = USER_LISTS[type].copy()
+ context = USER_LISTS[user_type].copy()
context['users'] = users
return direct_to_template(request, 'public/userlist.html', context)
@@ -51,7 +52,7 @@ def donate(request):
return direct_to_template(request, 'public/donate.html', context)
def download(request):
- return redirect('http://wiki.parabolagnulinux.org/get', permanent=True)
+ return redirect('//wiki.parabolagnulinux.org/get', permanent=True)
def feeds(request):
context = {
@@ -60,4 +61,11 @@ def feeds(request):
}
return direct_to_template(request, 'public/feeds.html', context)
+def keys(request):
+ context = {
+ 'keys': MasterKey.objects.select_related('owner', 'revoker',
+ 'owner__userprofile', 'revoker__userprofile').all(),
+ }
+ return direct_to_template(request, 'public/keys.html', context)
+
# vim: set ts=4 sw=4 et:
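
The new keys() view only becomes reachable once a URL pattern exposes it under
the 'page-keys' name that the templates below reverse. A hedged sketch of that
hook, assuming the usual Django 1.3 patterns() style (the actual urls.py
change and the URL path are outside this diff):

    from django.conf.urls.defaults import patterns, url

    urlpatterns += patterns('public.views',
        url(r'^master-keys/$', 'keys', name='page-keys'),
    )
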
diff --git a/releng/views.py b/releng/views.py
index 2b3d0936..e17a6e9c 100644
--- a/releng/views.py
+++ b/releng/views.py
@@ -42,7 +42,7 @@ class TestForm(forms.ModelForm):
success = forms.BooleanField(
help_text="Only check this if everything went fine. " \
"If you ran into problems please create a ticket on <a " \
- "href=\"https://bugs.archlinux.org/index.php?project=6\">the " \
+ "href=\"//bugs.parabolagnulinux.org/\">the " \
"bugtracker</a> (or check that one already exists) and link to " \
"it in the comments.",
required=False)
diff --git a/requirements.txt b/requirements.txt
index 27fda229..5704435e 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,4 +1,5 @@
Django==1.3.1
Markdown==2.0.3
South==0.7.3
-pytz>=2011c
+pyinotify==0.9.2
+pytz>=2011n
diff --git a/requirements_prod.txt b/requirements_prod.txt
index 9749a072..47d37ce2 100644
--- a/requirements_prod.txt
+++ b/requirements_prod.txt
@@ -2,5 +2,6 @@ Django==1.3.1
Markdown==2.0.3
MySQL-python==1.2.3
South==0.7.3
+pyinotify==0.9.2
python-memcached==1.47
-pytz>=2011c
+pytz>=2011n
diff --git a/settings.py b/settings.py
index 63c30412..ed12bbec 100644
--- a/settings.py
+++ b/settings.py
@@ -13,7 +13,7 @@ ADMINS = ()
MANAGERS = ADMINS
# Package out-of-date emails for orphans
-NOTIFICATIONS = ['packages@list.parabolagnulinux.org']
+NOTIFICATIONS = ['dev@lists.parabolagnulinux.org']
# Full path to the data directory
DEPLOY_PATH = os.path.dirname(os.path.realpath(__file__))
@@ -111,6 +111,7 @@ INSTALLED_APPS = (
'releng',
)
+## Server used for linking to PGP keysearch results
PGP_SERVER = 'pgp.mit.edu:11371'
## Import local settings
@@ -131,6 +132,6 @@ if DEBUG_TOOLBAR:
INSTALLED_APPS = list(INSTALLED_APPS) + [ 'debug_toolbar' ]
# URL to fetch a current list of available ISOs
-ISO_LIST_URL = 'http://repo.parabolagnulinux.org/isos/'
+ISO_LIST_URL = 'https://repo.parabolagnulinux.org/isos/'
# vim: set ts=4 sw=4 et:
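
PGP_SERVER deliberately stores a bare host:port pair rather than a full URL,
so callers can build standard HKP lookup links from it. A sketch of the idea;
the helper name is illustrative, not the actual template tag:

    from django.conf import settings

    def keyserver_search_url(key_id):
        # HKP keyservers expose searches under /pks/lookup
        return 'http://%s/pks/lookup?op=vindex&search=0x%s' % (
                settings.PGP_SERVER, key_id)
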
diff --git a/sitemaps.py b/sitemaps.py
index 8ac5bc4f..958d1f44 100644
--- a/sitemaps.py
+++ b/sitemaps.py
@@ -3,7 +3,7 @@ from django.core.urlresolvers import reverse
from main.models import Package
from news.models import News
-from packages.utils import get_group_info
+from packages.utils import get_group_info, get_split_packages_info
class PackagesSitemap(Sitemap):
changefreq = "weekly"
@@ -41,6 +41,21 @@ class PackageGroupsSitemap(Sitemap):
return '/groups/%s/%s/' % (obj['arch'], obj['name'])
+class SplitPackagesSitemap(Sitemap):
+ changefreq = "weekly"
+ priority = "0.3"
+
+ def items(self):
+ return get_split_packages_info()
+
+ def lastmod(self, obj):
+ return obj['last_update']
+
+ def location(self, obj):
+ return '/packages/%s/%s/%s/' % (
+ obj['repo'].name.lower(), obj['arch'], obj['pkgbase'])
+
+
class NewsSitemap(Sitemap):
changefreq = "never"
priority = "0.8"
@@ -56,10 +71,24 @@ class BaseSitemap(Sitemap):
base_viewnames = (
('index', 1.0, 'hourly'),
('packages-search', 0.8, 'hourly'),
- 'page-about', 'page-art', 'page-svn', 'page-devs', 'page-tus',
- 'page-fellows', 'page-donate', 'page-download', 'news-list',
- 'feeds-list', 'groups-list', 'mirror-list', 'mirror-status',
- 'mirrorlist', 'packages-differences', 'releng-test-overview',
+ ('page-keys', 0.8, 'weekly'),
+ ('news-list', 0.7, 'weekly'),
+ ('groups-list', 0.5, 'weekly'),
+ ('mirror-status', 0.4, 'hourly'),
+ 'page-about',
+ 'page-art',
+ 'page-svn',
+ 'page-devs',
+ 'page-tus',
+ 'page-fellows',
+ 'page-donate',
+ 'page-download',
+ 'feeds-list',
+ 'mirror-list',
+ 'mirrorlist',
+ 'packages-differences',
+ 'releng-test-overview',
+ 'visualize-index',
)
def items(self):
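
As with the existing sitemap classes, SplitPackagesSitemap takes effect only
once it is registered in the dict handed to the sitemap view. A hedged sketch
of that registration (the urls.py side is outside this hunk, and the
'split-packages' key is illustrative):

    from sitemaps import SplitPackagesSitemap

    sitemaps = {
        'split-packages': SplitPackagesSitemap,
        # ...existing packages/groups/news/base entries...
    }
    # passed as {'sitemaps': sitemaps} to django.contrib.sitemaps.views.sitemap
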
diff --git a/templates/base.html b/templates/base.html
index ca492281..746c6cf4 100644
--- a/templates/base.html
+++ b/templates/base.html
@@ -19,10 +19,10 @@
<ul id="archnavbarlist">
<li id="anb-home"><a href="/" title="Parabola news, packages, projects and more">Home</a></li>
<li id="anb-packages"><a href="/packages/" title="Package Database">Packages</a></li>
- <li id="anb-wiki"><a href="http://wiki.parabolagnulinux.org" title="Community documentation">Wiki</a></li>
- <li id="anb-bugs"><a href="https://bugs.parabolagnulinux.org" title="Issue Tracker">Bugs</a></li>
- <li id="anb-projects"><a href="https://projects.parabolagnulinux.org" title="Our Code">Projects</a></li>
- <li id="anb-download"><a href="http://wiki.parabolagnulinux.org/Download" title="Get Parabola">Download</a></li>
+ <li id="anb-wiki"><a href="//wiki.parabolagnulinux.org" title="Community documentation">Wiki</a></li>
+ <li id="anb-bugs"><a href="//bugs.parabolagnulinux.org" title="Issue Tracker">Bugs</a></li>
+ <li id="anb-projects"><a href="//projects.parabolagnulinux.org" title="Our Code">Projects</a></li>
+ <li id="anb-download"><a href="//wiki.parabolagnulinux.org/Download" title="Get Parabola">Download</a></li>
</ul>
</div>
</div><!-- #archnavbar -->
@@ -32,14 +32,14 @@
{% if user.is_authenticated %}
<ul>
<li><a href="/devel/" title="Developer Dashboard">Dashboard</a></li>
- <li><a href="https://projects.parabolagnulinux.org/" title="Git Projects">Projects</a></li>
+ <li><a href="//projects.parabolagnulinux.org/" title="Git Projects">Projects</a></li>
<li><a href="{% url news-list as newsl %}{{ newsl }}" title="Manage news articles">News</a></li>
<li><a href="/packages/signoffs/" title="Package signoffs">Signoffs</a></li>
<li><a href="/todo/" title="Developer todo lists">Todos</a></li>
<li><a href="/packages/differences/" title="Package
architecture differences">Architecture Differences</a></li>
<li><a
- href="http://list.parabolagnulinux.org/pipermail/dev-parabolagnulinux.org/"
+ href="//lists.parabolagnulinux.org/pipermail/dev/"
title="dev mailing list archives">Archives</a></li>
<li><a href="/devel/clock/" title="Developer world clocks">Dev Clocks</a></li>
{% if user.is_staff %}
diff --git a/templates/devel/clock.html b/templates/devel/clock.html
index cbf4b834..2eafd529 100644
--- a/templates/devel/clock.html
+++ b/templates/devel/clock.html
@@ -45,7 +45,7 @@
<script type="text/javascript">
$(document).ready(function() {
$("#clocks-table:has(tbody tr)").tablesorter(
- {widgets: ['zebra'], sortList: [[1,0]]});
+ {widgets: ['zebra'], sortList: [[0,0]]});
});
</script>
{% endblock %}
diff --git a/templates/devel/index.html b/templates/devel/index.html
index 08f5ec1b..ad024a34 100644
--- a/templates/devel/index.html
+++ b/templates/devel/index.html
@@ -15,8 +15,8 @@
<thead>
<tr>
<th>Name</th>
- <th>Repo</th>
<th>Version</th>
+ <th>Repo</th>
<th>Arch</th>
<th>Flagged</th>
<th>Last Updated</th>
@@ -26,14 +26,14 @@
{% for pkg in flagged %}
<tr class="{% cycle 'odd' 'even' %}">
<td>{% pkg_details_link pkg %}</td>
- <td>{{ pkg.repo.name }}</td>
<td>{{ pkg.full_version }}</td>
+ <td>{{ pkg.repo.name }}</td>
<td>{{ pkg.arch.name }}</td>
<td>{{ pkg.flag_date|date }}</td>
<td>{{ pkg.last_update|date }}</td>
</tr>
{% empty %}
- <tr class="empty"><td colspan="4"><em>No flagged packages to display</em></td></tr>
+ <tr class="empty"><td colspan="6"><em>No flagged packages to display</em></td></tr>
{% endfor %}
</tbody>
</table>
@@ -78,7 +78,6 @@
<th>Package Count</th>
<th>Incomplete Count</th>
</tr>
- </tr>
</thead>
<tbody>
{% for todo in todos %}
@@ -92,7 +91,50 @@
<td>{{ todo.incomplete_count }}</td>
</tr>
{% empty %}
- <tr class="empty"><td colspan="3"><em>No package todo lists to display</em></td></tr>
+ <tr class="empty"><td colspan="6"><em>No package todo lists to display</em></td></tr>
+ {% endfor %}
+ </tbody>
+ </table>
+
+ <h3>Signoff Status</h3>
+
+ <table id="dash-signoffs" class="results">
+ <thead>
+ <tr>
+ <th>Name</th>
+ <th>Version</th>
+ <th>Arch</th>
+ <th>Target Repo</th>
+ <th>Last Updated</th>
+ <th>Approved</th>
+ <th>Signoffs</th>
+ </tr>
+ </thead>
+ <tbody>
+ {% for group in signoffs %}
+ <tr class="{% cycle 'odd' 'even' %}">
+ <td>{% pkg_details_link group.package %}</td>
+ <td>{{ group.version }}</td>
+ <td>{{ group.arch.name }}</td>
+ <td>{{ group.target_repo }}</td>
+ <td>{{ group.last_update|date }}</td>
+ {% if group.specification.known_bad %}
+ <td class="approval signoff-bad">Bad</td>
+ {% else %}
+ {% if not group.specification.enabled %}
+ <td class="approval signoff-disabled">Disabled</td>
+ {% else %}
+ <td class="approval signoff-{{ group.approved|yesno }}">{{ group.approved|yesno|capfirst }}</td>
+ {% endif %}
+ {% endif %}
+ <td><ul class="signoff-list">
+ {% for signoff in group.signoffs %}
+ <li class="signed-username" title="Signed off by {{ signoff.user }}">{{ signoff.user }}{% if signoff.revoked %} (revoked){% endif %}</li>
+ {% endfor %}
+ </ul></td>
+ </tr>
+ {% empty %}
+ <tr class="empty"><td colspan="7"><em>No packages you maintain or have packaged need signoffs</em></td></tr>
{% endfor %}
</tbody>
</table>
@@ -135,6 +177,7 @@
<th class="key">Arch</th>
<th># Packages</th>
<th># Flagged</th>
+ <th># Signed</th>
</tr>
</thead>
<tbody>
@@ -147,6 +190,8 @@
<td><a href="/packages/?arch={{ arch.name }}&amp;flagged=Flagged"
title="View all flagged packages for the {{ arch.name }} architecture">
<strong>{{ arch.packages.flagged.count }}</strong> packages</a></td>
+
+ <td><strong>{{ arch.packages.signed.count }}</strong> packages</td>
</tr>
{% endfor %}
</tbody>
@@ -165,6 +210,7 @@
<th class="key">Repository</th>
<th># Packages</th>
<th># Flagged</th>
+ <th># Signed</th>
</tr>
</thead>
<tbody>
@@ -177,6 +223,7 @@
<td><a href="/packages/?repo={{ repo.name }}&amp;flagged=Flagged"
title="View all flagged packages in the {{ repo.name }} repository">
<strong>{{ repo.packages.flagged.count }}</strong> packages</a></td>
+ <td><strong>{{ repo.packages.signed.count }}</strong> packages</td>
</tr>
{% endfor %}
</tbody>
@@ -251,6 +298,11 @@ $(document).ready(function() {
{widgets: ['zebra'], sortList: [[0,0], [1,0]]});
$("#dash-todo:not(:has(tbody tr.empty))").tablesorter(
{widgets: ['zebra'], sortList: [[1,1]]});
+ $("#dash-signoffs:not(:has(tbody tr.empty))").tablesorter({
+ widgets: ['zebra'],
+ sortList: [[0,0]],
+ headers: { 6: {sorter: false } }
+ });
$(".dash-stats").tablesorter({
widgets: ['zebra'],
sortList: [[0,0]],
diff --git a/templates/devel/profile.html b/templates/devel/profile.html
index b731b3a7..b497a20a 100644
--- a/templates/devel/profile.html
+++ b/templates/devel/profile.html
@@ -6,7 +6,7 @@
<h2>Developer Profile</h2>
- <form id="edit-profile-form" enctype="multipart/form-data" method="post">{% csrf_token %}
+ <form id="edit-profile-form" enctype="multipart/form-data" method="post" action="">{% csrf_token %}
<p><em>Note:</em> This is the public information shown on the developer
and/or TU profiles page, so please be appropriate with the information
you provide here.</p>
diff --git a/templates/packages/details.html b/templates/packages/details.html
index 04fd5758..4570627f 100644
--- a/templates/packages/details.html
+++ b/templates/packages/details.html
@@ -16,10 +16,8 @@
<div id="actionlist">
<h4>Package Actions</h4>
<ul class="small">
- <li>
- <a href="{% bugs_list pkg %}" title="View existing bug tickets for {{ pkg.pkgname }}">Bug Reports</a> /
- <a href="{% bug_report pkg %}" title="Report new bug for {{ pkg.pkgname }}">Add New Bug</a>
- </li>
+ <li><a href="{% bugs_list pkg %}" title="View existing bug tickets for {{ pkg.pkgname }}">Bug Reports</a></li>
+ <li><a href="{% bug_report pkg %}" title="Report bug for {{ pkg.pkgname }}">Report a Bug</a></li>
<li><a href="{% flag_unfree pkg %}" title="Report {{ pkg.pkgname }} as unfree">Report as unfree</a></li>
<li><a href="{% get_wiki_link pkg %}" title="Search wiki for {{ pkg.pkgname }}">Search Wiki</a></li>
{% if pkg.flag_date %}
@@ -41,6 +39,7 @@
onclick="return !window.open('/packages/flaghelp/','FlagHelp',
'height=350,width=450,location=no,scrollbars=yes,menubars=no,toolbars=no,resizable=no');">(?)</a></li>
{% endif %}
+ <li><a href="{% get_download_link pkg %}" rel="nofollow" title="Download {{ pkg.pkgname }} from mirror">Download From Mirror</a></li>
</ul>
{% if perms.main.change_package %}
@@ -104,7 +103,7 @@
</tr><tr>
<th>Upstream URL:</th>
<td>{% if pkg.url %}<a href="{{ pkg.url }}"
- title="Visit the website for {{ pkg.pkgname }}">{{ pkg.url }}</a>{% endif %}</td>
+ title="Visit the website for {{ pkg.pkgname }}">{{ pkg.url|url_unquote }}</a>{% endif %}</td>
</tr><tr>
<th>License(s):</th>
<td>{{ pkg.licenses.all|join:", " }}</td>
diff --git a/templates/packages/differences.html b/templates/packages/differences.html
index 69c39756..0412f8c2 100644
--- a/templates/packages/differences.html
+++ b/templates/packages/differences.html
@@ -6,6 +6,35 @@
{% if differences %}
<div id="differences-filter" class="box filter-criteria">
<h2>Package Differences by Architecture</h2>
+ <h3>Select architectures</h3>
+ <form id="arch_selector" method="get" action=".">
+ <fieldset>
+ <legend>Select arches</legend>
+ <div><label for="arch_a" title="Architecture A">Architecture A</label>
+ <select name="arch_a" id="arch_a">
+ {% for arch in arches %}
+ <option
+ {% if arch == arch_a %}
+ selected="selected"
+ {% endif %}
+ >{{ arch }}</option>
+ {% endfor %}
+ </select>
+ </div>
+ <div><label for="arch_b" title="Architecture B">Architecture B</label>
+ <select name="arch_b" id="arch_b">
+ {% for arch in arches %}
+ <option
+ {% if arch == arch_b %}
+ selected="selected"
+ {% endif %}
+ >{{ arch }}</option>
+ {% endfor %}
+ </select>
+ </div>
+ <div><label>&nbsp;</label><input type="submit" title="Show difference between selected architectures"></div>
+ </fieldset>
+ </form>
<h3>Filter Differences View</h3>
<form id="diff_filter" method="post" action=".">
<fieldset>
@@ -65,7 +94,7 @@ $(document).ready(function() {
$('.results').tablesorter({widgets: ['zebra'], sortList: [[1,0], [0,0]]});
$('#diff_filter select').change(filter_packages);
$('#diff_filter input').change(filter_packages);
- $('#criteria_reset').click(filter_reset);
+ $('#criteria_reset').click(filter_packages_reset);
// fire function on page load to ensure the current form selections take effect
filter_packages();
});
diff --git a/templates/packages/files.html b/templates/packages/files.html
index 149154a6..78a40def 100644
--- a/templates/packages/files.html
+++ b/templates/packages/files.html
@@ -1,5 +1,5 @@
{% extends "base.html" %}
-{% block title %}Parabola - {{ pkg.pkgname }} {{ pkg.full_version }} - Package File List{% endblock %}
+{% block title %}Parabola - {{ pkg.pkgname }} {{ pkg.full_version }} ({{ pkg.arch.name }}) - File List{% endblock %}
{% block navbarclass %}anb-packages{% endblock %}
{% block content %}
diff --git a/templates/packages/flag.html b/templates/packages/flag.html
index f439ca36..27ff73b0 100644
--- a/templates/packages/flag.html
+++ b/templates/packages/flag.html
@@ -1,18 +1,19 @@
{% extends "base.html" %}
{% load package_extras %}
-{% block title %}Parabola - Flag Package - {{ package.pkgname }}{% endblock %}
+{% block title %}Parabola - Flag Package - {{ package.pkgname }} {{ package.full_version }} ({{ package.arch.name }}){% endblock %}
+{% block head %}<meta name="robots" content="noindex"/>{% endblock %}
{% block navbarclass %}anb-packages{% endblock %}
{% block content %}
<div id="pkg-flag" class="box">
- <h2>Flag Package: {{ package.pkgname }}</h2>
+ <h2>Flag Package: {{ package.pkgname }} {{ package.full_version }} ({{ package.arch.name }})</h2>
<p>If you notice a package is out-of-date (i.e., there is a newer
<strong>stable</strong> release available), then please notify us using
the form below. Do <em>not</em> report bugs via this form!</p>
- <p>Note that all of the following packages will be marked out of date:</p>
+    <p>Note that the following {{ packages|length }} package{{ packages|pluralize }} will be marked out-of-date:</p>
<ul>
{% for pkg in packages %}
<li>{% pkg_details_link pkg %} {{ pkg.full_version }} [{{ pkg.repo.name|lower }}] ({{ pkg.arch.name }})</li>
@@ -22,13 +23,13 @@
<p>The message box portion of the flag utility is optional, and meant
for short messages only. If you need more than 200 characters for your
message, then file a bug report, email the maintainer directly, or send
- an email to the <a href="http://list.parabolagnulinux.org/listinfo.cgi/dev-parabolagnulinux.org"
+ an email to the <a href="//lists.parabolagnulinux.org/mailman/listinfo/dev"
title="Visit the dev mailing list">Parabola Development mailing list</a>
with your additional text.</p>
<p><strong>Note:</strong> Do <em>not</em> use this facility if the
package is broken! The package will be unflagged and the report will be ignored!
- <a href="https://bugs.parabolagnulinux.org/" title="Parabola Bugtracker">Use the
+ <a href="//bugs.parabolagnulinux.org/" title="Parabola Bugtracker">Use the
bugtracker to file a bug</a> instead.</p>
<p>Please confirm your flag request for {{package.pkgname}}:</p>
diff --git a/templates/packages/flag_confirmed.html b/templates/packages/flag_confirmed.html
index cc743dd6..62080d62 100644
--- a/templates/packages/flag_confirmed.html
+++ b/templates/packages/flag_confirmed.html
@@ -1,14 +1,16 @@
{% extends "base.html" %}
{% load package_extras %}
-{% block title %}Parabola - Package Flagged - {{ package.pkgname }}{% endblock %}
+{% block title %}Parabola - Package Flagged - {{ package.pkgname }} {{ package.full_version }} ({{ package.arch.name }}){% endblock %}
+{% block head %}<meta name="robots" content="noindex"/>{% endblock %}
{% block navbarclass %}anb-packages{% endblock %}
{% block content %}
<div id="pkg-flag" class="box">
<h2>Package Flagged - {{ package.pkgname }}</h2>
- <p>Thank you, the maintainers have been notified the following packages are out-of-date:</p>
+    <p>Thank you, the maintainers have been notified that the following
+    {{ packages|length }} package{{ packages|pluralize }} {{ packages|pluralize:"is,are" }} out-of-date:</p>
<ul>
{% for pkg in packages %}
<li>{% pkg_details_link pkg %} {{ pkg.full_version }} [{{ pkg.repo.name|lower }}] ({{ pkg.arch.name }})</li>
diff --git a/templates/packages/flagged.html b/templates/packages/flagged.html
index d2328381..f7940cd1 100644
--- a/templates/packages/flagged.html
+++ b/templates/packages/flagged.html
@@ -1,12 +1,13 @@
{% extends "base.html" %}
{% load package_extras %}
-{% block title %}Parabola - Flag Package - {{ pkg.pkgname }}{% endblock %}
+{% block title %}Parabola - Flag Package - {{ pkg.pkgname }} {{ pkg.full_version }} ({{ pkg.arch.name }}){% endblock %}
+{% block head %}<meta name="robots" content="noindex"/>{% endblock %}
{% block navbarclass %}anb-packages{% endblock %}
{% block content %}
<div id="pkg-flagged-error" class="box">
- <h2>Error: Package already flagged</h2>
+ <h2>Package {{ pkg.pkgname }} {{ pkg.full_version }} ({{ pkg.arch.name }}) already flagged</h2>
<p><strong>{{pkg.pkgname}}</strong> has already been flagged out-of-date.</p>
diff --git a/templates/packages/flaghelp.html b/templates/packages/flaghelp.html
index 4a9d1cdf..eac13f83 100644
--- a/templates/packages/flaghelp.html
+++ b/templates/packages/flaghelp.html
@@ -25,13 +25,12 @@
<p>The message box portion of the flag utility is optional, and meant
for short messages only. If you need more than 200 characters for your
message, then file a bug report, email the maintainer directly, or send
- an email to the <a target="_blank"
- href="http://list.parabolagnulinux.org/listinfo.cgi/dev-parabolagnulinux.org"
+ an email to the <a target="_blank" href="//lists.parabolagnulinux.org/mailman/listinfo/dev"
title="Visit the parabola dev mailing list">parabola mailing list</a>
with your additional text.</p>
<p><strong>Note:</strong> Please do <em>not</em> use this facility if the
- package is broken! Use the <a target="_blank" href="https://bugs.parabolagnulinux.org"
+ package is broken! Use the <a target="_blank" href="//bugs.parabolagnulinux.org"
title="Parabola Bugtracker">bugtracker</a> instead.</p>
</body>
diff --git a/templates/packages/search.html b/templates/packages/search.html
index 8bf63a15..bb5c1c8b 100644
--- a/templates/packages/search.html
+++ b/templates/packages/search.html
@@ -1,10 +1,12 @@
{% extends "base.html" %}
{% load package_extras %}
+{% load adminmedia %}
+
{% block title %}Parabola - Package Database{% endblock %}
{% block navbarclass %}anb-packages{% endblock %}
{% block head %}
-<link rel="stylesheet" type="text/css" href="/media/admin_media/css/widgets.css" />
+<link rel="stylesheet" type="text/css" href="{% admin_media_prefix %}css/widgets.css" />
{% endblock %}
{% block content %}
@@ -104,7 +106,8 @@
{% endif %}
<td>{{ pkg.arch.name }}</td>
<td>{{ pkg.repo.name|capfirst }}</td>
- <td>{% pkg_details_link pkg %}</td>
+ <td><a href="{{ pkg.get_absolute_url }}"
+ title="Package details for {{ pkg.pkgname }}">{{ pkg.pkgname }}</a></td>
{% if pkg.flag_date %}
<td><span class="flagged">{{ pkg.full_version }}</span></td>
{% else %}
@@ -153,21 +156,20 @@
</div><!-- #pkglist-results -->
{% else %}
<div class="box">
- <p>We couldn't find any packages matching your query. Try searching again
- using different criteria.</p>
+ <p>We couldn't find any packages matching your query. Try searching again
+ using different criteria.</p>
</div>
{% endif %}
<div id="pkglist-about" class="box">
- <p>You are browsing the Parabola package database. From here you can
- find detailed information about packages located in the official
- supported repositories. If you need the sourceball from where a
- package is built, you can look at our <a
- href='http://repo.parabolagnulinux.org/sources/packages'
+    <p>You are browsing the Parabola package database. From here you can find
+    detailed information about packages located in the officially supported repositories.
+    If you need the source tarball a package is built from, you can look at our <a
+ href='//repo.parabolagnulinux.org/sources/packages'
+    title='Package source tarballs'>sources repo</a>.</p> </div>
<script type="text/javascript" src="/jsi18n/"></script>
-{% load adminmedia %}<script type="text/javascript" src="{% admin_media_prefix %}js/core.js"></script>
<script type="text/javascript">window.__admin_media_prefix__ = "{% filter escapejs %}{% admin_media_prefix %}{% endfilter %}";</script>
+<script type="text/javascript" src="{% admin_media_prefix %}js/core.js"></script>
{{search_form.media}}
{% endblock %}
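
The search page relies on Django's form-media machinery: because
PackageSearchForm uses AdminDateWidget (see packages/views/search.py above),
rendering {{ search_form.media }} emits the widget's JS and CSS tags
automatically. A self-contained sketch of the mechanism, with a hypothetical
widget:

    from django import forms

    class CalendarWidget(forms.TextInput):
        class Media:
            css = {'all': ('css/widgets.css',)}
            js = ('js/core.js', 'js/calendar.js')

    class DateForm(forms.Form):
        when = forms.DateField(widget=CalendarWidget)

    print DateForm().media  # renders the <link>/<script> tags
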
diff --git a/templates/packages/signoff_cell.html b/templates/packages/signoff_cell.html
new file mode 100644
index 00000000..01a5d58d
--- /dev/null
+++ b/templates/packages/signoff_cell.html
@@ -0,0 +1,25 @@
+{% spaceless %}
+{% if group.signoffs %}
+<ul class="signoff-list">
+ {% for signoff in group.signoffs %}
+ <li class="signed-username" title="Signed off by {{ signoff.user }}">{{ signoff.user }}{% if signoff.revoked %} (revoked){% endif %}</li>
+ {% endfor %}
+</ul>
+{% endif %}
+{% if group.user_signed_off %}
+<div>
+ <a class="signoff-link" href="{{ group.package.get_absolute_url }}signoff/revoke/"
+ title="Revoke signoff {{ group.pkgbase }} for {{ group.arch }}">Revoke Signoff</a></div>
+{% else %}
+{% if not group.specification.known_bad and group.specification.enabled %}
+<div>
+ <a class="signoff-link" href="{{ group.package.get_absolute_url }}signoff/"
+ title="Signoff {{ group.pkgbase }} for {{ group.arch }}">Signoff</a></div>
+{% endif %}
+{% endif %}
+{% if user == group.packager or user in group.maintainers %}
+<div>
+ <a class="signoff-options" href="{{ group.package.get_absolute_url }}signoff/options/">Signoff Options</a>
+</div>
+{% endif %}
+{% endspaceless %}
diff --git a/templates/packages/signoff_options.html b/templates/packages/signoff_options.html
new file mode 100644
index 00000000..ee9b8b47
--- /dev/null
+++ b/templates/packages/signoff_options.html
@@ -0,0 +1,18 @@
+{% extends "base.html" %}
+
+{% block title %}Parabola - Package Signoff Options - {{ package.pkgbase }} {{ package.full_version }} ({{ package.arch.name }}){% endblock %}
+{% block head %}<meta name="robots" content="noindex"/>{% endblock %}
+{% block navbarclass %}anb-packages{% endblock %}
+
+{% block content %}
+<div id="signoff-options" class="box">
+ <h2>Package Signoff Options: {{ package.pkgbase }} {{ package.full_version }} ({{ package.arch.name }})</h2>
+ <form id="signoff-options-form" method="post">{% csrf_token %}
+ <fieldset>
+ {{ form.as_p }}
+ </fieldset>
+ <p><label></label> <input title="Set Signoff Options" type="submit" value="Set Signoff Options" /></p>
+ </form>
+
+</div>
+{% endblock %}
diff --git a/templates/packages/signoff_report.txt b/templates/packages/signoff_report.txt
new file mode 100644
index 00000000..046c2f1e
--- /dev/null
+++ b/templates/packages/signoff_report.txt
@@ -0,0 +1,41 @@
+=== {% autoescape off %}Signoff report for [{{ repo|lower }}] ===
+{{ signoffs_url }}
+
+There are currently:
+* {{ new|length }} new package{{ new|length|pluralize }} in the last {{ new_hours }} hours
+* {{ bad|length }} known bad package{{ bad|length|pluralize }}
+* {{ disabled|length }} package{{ disabled|length|pluralize }} not accepting signoffs
+* {{ complete|length }} fully signed off package{{ complete|length|pluralize }}
+* {{ incomplete|length }} package{{ incomplete|length|pluralize }} missing signoffs
+* {{ old|length }} package{{ old|length|pluralize }} older than {{ old_days }} days
+
+(Note: the word 'package' as used here refers to packages as grouped by
+pkgbase, architecture, and repository; e.g., one PKGBUILD produces one
+package per architecture, even if it is a split package.)
+
+
+{% if new %}== New packages in [{{ repo|lower }}] in the last {{ new_hours }} hours ({{ new|length }} total) ==
+{% for group in new %}
+* {{ group.pkgbase }}-{{ group.version }} ({{ group.arch }}){% endfor %}
+
+{% endif %}{% regroup incomplete by target_repo as by_repo %}{% for target_repo in by_repo %}
+== Incomplete signoffs for [{{ target_repo.grouper|lower }}] ({{ target_repo.list|length }} total) ==
+{% for group in target_repo.list %}
+* {{ group.pkgbase }}-{{ group.version }} ({{ group.arch }})
+ {{ group.completed }}/{{ group.required }} signoffs{% endfor %}
+{% endfor %}
+
+{% if complete %}== Completed signoffs ({{ complete|length }} total) ==
+{% for group in complete %}
+* {{ group.pkgbase }}-{{ group.version }} ({{ group.arch }}){% endfor %}
+
+
+{% endif %}{% if old %}== All packages in [{{ repo|lower }}] for more than {{ old_days }} days ({{ old|length }} total) ==
+{% for group in old %}
+* {{ group.pkgbase }}-{{ group.version }} ({{ group.arch }}), since {{ group.last_update|date }}{% endfor %}
+
+
+{% endif %}== Top five signers in the last {{ new_hours }} hours ==
+{% for leader in leaders %}
+{{ forloop.counter }}. {{ leader.user }} - {{ leader.count }} signoffs{% endfor %}
+{% endautoescape %}
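
Every count in the report above operates on the pkgbase/architecture/repository
grouping spelled out in the note near the top of the template. A minimal
sketch of that grouping rule, assuming plain package objects with those three
attributes (the helper name is illustrative):

    from collections import defaultdict

    def group_for_signoffs(packages):
        groups = defaultdict(list)
        for pkg in packages:
            # one signoff group per (pkgbase, arch, repo) triple, so a
            # split PKGBUILD still yields exactly one group per arch
            groups[(pkg.pkgbase, pkg.arch, pkg.repo)].append(pkg)
        return groups
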
diff --git a/templates/packages/signoffs.html b/templates/packages/signoffs.html
index 4745ff53..c24774a9 100644
--- a/templates/packages/signoffs.html
+++ b/templates/packages/signoffs.html
@@ -9,47 +9,71 @@
<h2>Package Signoffs</h2>
- <p>{{ signoff_groups|length }} signoff group{{ signoff_groups|pluralize }} found.
+ <p>{{ signoff_groups|length }} total signoff group{{ signoff_groups|pluralize }} found.
A "signoff group" consists of packages grouped by pkgbase, architecture, and repository.</p>
+ <div class="box filter-criteria">
+ <h3>Filter Displayed Signoffs</h3>
+ <form id="signoffs_filter" method="post" action=".">
+ <fieldset>
+ <legend>Select filter criteria</legend>
+ {% for arch in arches %}
+ <div><label for="id_arch_{{ arch.name }}" title="Architecture {{ arch.name }}">Arch {{ arch.name }}</label>
+ <input type="checkbox" name="arch_{{ arch.name }}" id="id_arch_{{ arch.name }}" class="arch_filter" value="{{ arch.name }}" checked="checked"/></div>
+ {% endfor %}
+ {% for repo_name in repo_names %}
+ <div><label for="id_repo_{{ repo_name|lower }}" title="Target Repository {{ repo_name }}">[{{ repo_name|lower }}]</label>
+ <input type="checkbox" name="repo_{{ repo_name|lower }}" id="id_repo_{{ repo_name|lower }}" class="repo_filter" value="{{ repo_name|lower }}" checked="checked"/></div>
+ {% endfor %}
+ <div><label for="id_pending" title="Packages with not enough signoffs">Only Pending Approval</label>
+ <input type="checkbox" name="pending" id="id_pending" value="pending"/></div>
+ <div><label>&nbsp;</label><input title="Reset search criteria" type="button" id="criteria_reset" value="Reset"/></div>
+ <div class="clear"></div>
+ <div id="filter-info"><span id="filter-count">{{ signoff_groups|length }}</span> signoff groups displayed.</div>
+ </fieldset>
+ </form>
+ </div>
+
<table id="signoffs" class="results">
<thead>
<tr>
+ <th>Package Base/Version</th>
<th>Arch</th>
- <th>Package Base</th>
+ <th>Target Repo</th>
+ <th>Packager</th>
<th># of Packages</th>
- <th>Version</th>
<th>Last Updated</th>
- <th>Target Repo</th>
<th>Approved</th>
- <th>Signoff</th>
+ <th>Signoffs</th>
+ <th>Notes</th>
</tr>
</thead>
- <tbody>
+ <tbody id="tbody_signoffs">
{% for group in signoff_groups %}
- {% with group.package as pkg %}
- <tr class="{% cycle 'odd' 'even' %}">
- <td>{{ pkg.arch.name }}</td>
- <td>{% pkg_details_link pkg %}</td>
- <td>{{ group.packages|length }}</td>
- <td>{{ pkg.full_version }}</td>
- <td>{{ pkg.last_update|date }}</td>
+ <tr class="{% cycle 'odd' 'even' %} {{ group.arch.name }} {{ group.target_repo|lower }}">
+ <td>{% pkg_details_link group.package %} {{ group.version }}</td>
+ <td>{{ group.arch.name }}</td>
<td>{{ group.target_repo }}</td>
- <td class="signoff-{{ group.approved|yesno }}">
- {{ group.approved|yesno|capfirst }}</td>
- <td>
- <ul>
- <li><a class="signoff-link" href="{{ pkg.get_absolute_url }}signoff/"
- title="Signoff {{ pkg.pkgname }} for {{ pkg.arch }}">Signoff</a>
- </li>
- {% for signoff in group.signoffs %}
- <li class="signed-username" title="Signed off by {{ signoff.user }}">
- {{ signoff.user }}{% if signoff.revoked %} (revoked){% endif %}</li>
- {% endfor %}
- </ul>
- </td>
+ <td>{{ group.packager|default:"Unknown" }}</td>
+ <td>{{ group.packages|length }}</td>
+ <td class="epoch-{{ group.last_update|date:'U' }}">{{ group.last_update|date }}</td>
+ {% if group.specification.known_bad %}
+ <td class="approval signoff-bad">Bad</td>
+ {% else %}
+ {% if not group.specification.enabled %}
+ <td class="approval signoff-disabled">Disabled</td>
+ {% else %}
+ <td class="approval signoff-{{ group.approved|yesno }}">{{ group.approved|yesno|capfirst }}</td>
+ {% endif %}
+ {% endif %}
+ <td>{% include "packages/signoff_cell.html" %}</td>
+ <td class="wrap">{% if not group.default_spec %}{% with group.specification as spec %}
+ {% if spec.required != 2 %}Required signoffs: {{ spec.required }}<br/>{% endif %}
+ {% if not spec.enabled %}Signoffs are not currently enabled<br/>{% endif %}
+ {% if spec.known_bad %}Package is known to be bad<br/>{% endif %}
+ {{ spec.comments|default:""|linebreaksbr }}
+ {% endwith %}{% endif %}</td>
</tr>
- {% endwith %}
{% endfor %}
</tbody>
</table>
@@ -60,8 +84,12 @@
<script type="text/javascript">
$(document).ready(function() {
$('a.signoff-link').click(signoff_package);
- $(".results").tablesorter({widgets: ['zebra'], sortList: [[1,0]],
- headers: { 6: { sorter: false } } });
+ $(".results").tablesorter({widgets: ['zebra'], sortList: [[0,0]],
+ headers: { 5: { sorter: 'epochdate' }, 7: { sorter: false }, 8: {sorter: false } } });
+ $('#signoffs_filter input').change(filter_signoffs);
+ $('#criteria_reset').click(filter_signoffs_reset);
+ // fire function on page load to ensure the current form selections take effect
+ filter_signoffs();
});
</script>
{% endblock %}
diff --git a/templates/public/about.html b/templates/public/about.html
index 5c9dc16d..099d5513 100644
--- a/templates/public/about.html
+++ b/templates/public/about.html
@@ -53,9 +53,10 @@
<li>Host repositories. Mirrors are not abundant.</li>
- <li>Take a look at our <a href="http://wiki.parabolagnulinux.org/TODO" title="TODO">TODO list</a></li>
+ <li>Take a look at our <a href="//wiki.parabolagnulinux.org/TODO" title="TODO">TODO list</a></li>
</ul>
</div>
+<br /><br />
{% endblock %}
diff --git a/templates/public/art.html b/templates/public/art.html
index 68179f23..3a92b8b4 100644
--- a/templates/public/art.html
+++ b/templates/public/art.html
@@ -10,7 +10,7 @@
<p>You can help by creating artwork for Parabola GNU/Linux-libre.</p>
- <p>Send your designs to web@list.parabolagnulinux.org and state they are CC-by-sa
+    <p>Send your designs to dev@lists.parabolagnulinux.org and state that they are licensed CC-by-sa
or another free culture friendly license.</p>
diff --git a/templates/public/developer_list.html b/templates/public/developer_list.html
index 2abbbfe4..5aa4c6b2 100644
--- a/templates/public/developer_list.html
+++ b/templates/public/developer_list.html
@@ -4,7 +4,7 @@
<p>
{% for dev in dev_list %}
<a href="#{{ dev.username }}" title="Jump to profile for {{ dev.get_full_name }}">
- {{ dev.first_name }}{{ dev.last_name.0|capfirst}}</a> &nbsp;&nbsp;
+ {{ dev.first_name }} {{ dev.last_name }}</a> &nbsp;&nbsp;
{% endfor %}
</p>
</div>
@@ -21,7 +21,7 @@
<table class="bio bio-{{ dev.username }}" cellspacing="0">
<tr>
<th>Name:</th>
- <td>{{ dev.get_full_name }}</td>
+ <td>{{ dev.get_full_name }}{% if prof.latin_name %} ({{ prof.latin_name}}){% endif %}</td>
</tr><tr>
<th>Alias:</th>
<td>{{ prof.alias }}</td>
diff --git a/templates/public/donate.html b/templates/public/donate.html
index ef7f252d..c6e055f5 100644
--- a/templates/public/donate.html
+++ b/templates/public/donate.html
@@ -18,7 +18,7 @@
anything, because we are a really small community of hackers.</p>
<p>If you want, we have a pretty nice <a
- href='http://wiki.parabolagnulinux.org/TODO' title='The TODO
+ href='//wiki.parabolagnulinux.org/TODO' title='The TODO
list!'>TODO list</a> you can check to help us by donating some of your
time. That will be very much appreciated by us :)</p>
diff --git a/templates/public/download.html b/templates/public/download.html
index 5c5cf5bd..2e1024b0 100644
--- a/templates/public/download.html
+++ b/templates/public/download.html
@@ -9,7 +9,7 @@
<h2>Parabola Downloads</h2>
- {% with "2011.09.1" as version %}
+ {% with "2010.12.29" as version %}
<h3>Release Info</h3>
<p>All available images can be burned to a CD, mounted as an ISO file,
@@ -19,18 +19,18 @@
<ul>
<li><strong>Current Release:</strong> {{ version }}</li>
- <li><strong>Included Kernel:</strong> 3.0.3</li>
+ <li><strong>Included Kernel:</strong> 2.6.36.2</li>
<li><strong>Resources:</strong>
<ul>
<li><a
- href="http://list.parabolagnulinux.org/listinfo.cgi/dev-parabolagnulinux.org"
+ href="//lists.parabolagnulinux.org/mailman/listinfo/dev"
title="Parabola Hackers Discussion List">Mailing List</a></li>
</ul>
</li>
<li><strong>Instructions:</strong>
<ul>
<li><a
- href="http://wiki.parabolagnulinux.org/installation_guide"
+ href="//wiki.parabolagnulinux.org/installation_guide"
title="Official Installation Guide">Parabola Install Guide</a>.
</li>
</ul>
@@ -42,7 +42,7 @@
<p>If you are an Arch user, there is no need to download the ISO
to update your existing system to Parabola. You can just follow the
instructions in our wiki to convert your existing Arch system into a free
- as in freedom one. <a href='http://wiki.parabolagnulinux.org/migration'
+ as in freedom one. <a href='//wiki.parabolagnulinux.org/migration'
title='Migration Guide'>More here.</a></p>
<h3>BitTorrent Download (recommended)</h3>
@@ -68,12 +68,12 @@
</td>
<td class="cpu-arch">
- <a href="http://repo.parabolagnulinux.org/isos/i686/parabola-{{version}}-netinstall-i686.iso.torrent"
+ <a href="//repo.parabolagnulinux.org/isos/i686/parabola-{{version}}-netinstall-i686.iso.torrent"
title="Download for i686 architecture">Download</a>
</td>
<td class="cpu-arch">
- <a href="http://repo.parabolagnulinux.org/isos/x86_64/parabola-{{version}}-netinstall-x86_64.iso.torrent"
+ <a href="//repo.parabolagnulinux.org/isos/x86_64/parabola-{{version}}-netinstall-x86_64.iso.torrent"
title="Download for x86-64 architecture">Download</a>
</td>
@@ -90,35 +90,22 @@
<td class="wrap">
Downloads and installs package versions via FTP for absolute freshness.
</td>
- </tr>
- <tr>
+ </tr><tr>
<td>
Core Image
- </td>
-
- <td class="cpu-arch">
- <a href="http://repo.parabolagnulinux.org/isos/i686/parabola-{{version}}-core-i686.iso.torrent"
+ </td><td class="cpu-arch">
+ <a href="//repo.parabolagnulinux.org/isos/i686/parabola-{{version}}-core-i686.iso.torrent"
title="Download for i686 architecture">Download</a>
- </td>
-
- <td class="cpu-arch">
- <a href="http://repo.parabolagnulinux.org/isos/x86_64/parabola-{{version}}-core-x86_64.iso.torrent"
+ </td><td class="cpu-arch">
+ <a href="//repo.parabolagnulinux.org/isos/x86_64/parabola-{{version}}-core-x86_64.iso.torrent"
title="Download for x86-64 architecture">Download</a>
- </td>
-
- <td class="magnet-link">
- <a
- href="magnet:?xt=urn:btih:d9bb9f9641a222d2d302988da95225f570bcdb6d&dn=parabola-2010.12.29-core-i686.iso&tr=http%3A%2F%2Ftracker.publicbt.com%2Fannounce"
+ </td><td class="magnet-link">
+ <a href="magnet:?xt=urn:btih:d9bb9f9641a222d2d302988da95225f570bcdb6d&dn=parabola-2010.12.29-core-i686.iso&tr=http%3A%2F%2Ftracker.publicbt.com%2Fannounce"
title="Magnet Link for ISO image">i686 Magnet</a>
- </td>
-
- <td class="magnet-link">
- <a
- href="magnet:?xt=urn:btih:fae6de60f0dfa703165e94df5a77e9bd607ef4cf&dn=parabola-2010.12.29-core-x86_64.iso&tr=http%3A%2F%2Ftracker.publicbt.com%2Fannounce"
+ </td><td class="magnet-link">
+ <a href="magnet:?xt=urn:btih:fae6de60f0dfa703165e94df5a77e9bd607ef4cf&dn=parabola-2010.12.29-core-x86_64.iso&tr=http%3A%2F%2Ftracker.publicbt.com%2Fannounce"
title="Magnet Link for ISO image">x86_64 Magnet</a>
- </td>
-
- <td class="wrap">
+ </td><td class="wrap">
Core packages are included on the media. Good for basic off-line installation.
</td>
</tr>
@@ -132,7 +119,7 @@
image matches the checksum from the MD5SUMS or SHA256SUMS file in the same
directory as the image.</p>
- <p><a href='http://repo.parabolagnulinux.org/isos/' title='Parabola ISOs
+ <p><a href='//repo.parabolagnulinux.org/isos/' title='Parabola ISOs
directory'>Go to the Parabola ISOs directory.</a></p>
{%endwith%}
diff --git a/templates/public/feeds.html b/templates/public/feeds.html
index 79e8a1aa..69789150 100644
--- a/templates/public/feeds.html
+++ b/templates/public/feeds.html
@@ -17,10 +17,10 @@
the Parabola staff.</p>
<p>The <a
- href="http://wiki.parabolagnulinux.org/feed.php"
+ href="//wiki.parabolagnulinux.org/index.php?title=Special:RecentChanges&feed=atom"
title="ParabolaWiki Recent Changes feed" class="rss">Parabola Wiki: Recent
changes feed</a> is also available to track document changes from the
- <a href="http://wiki.parabolagnulinux.org/" title="Parabola Wiki community
+ <a href="//wiki.parabolagnulinux.org/" title="Parabola Wiki community
documentation">Parabola Wiki</a>.</p>
<h3>Package Feeds</h3>
diff --git a/templates/public/https.html b/templates/public/https.html
index 7cfe44e9..e53dc8e9 100644
--- a/templates/public/https.html
+++ b/templates/public/https.html
@@ -19,9 +19,8 @@
<h3>I just want to get the ISOs</h3>
<p>You can proceed to our <a
- href="http://wiki.parabolagnulinux.org/get">ISOs download page</a> on <a
- href="http://wiki.parabolagnulinux.org/">our wiki</a> (which isn't secured
- yet, sadly).</p>
+ href="https://wiki.parabolagnulinux.org/get">ISOs download page</a> on <a
+ href="https://wiki.parabolagnulinux.org/">our wiki</a>.</p>
<h3>I want to do this the right way</h3>
diff --git a/templates/public/index.html b/templates/public/index.html
index c68baedb..36bb5484 100644
--- a/templates/public/index.html
+++ b/templates/public/index.html
@@ -112,38 +112,24 @@
<h4>Documentation</h4>
<ul>
- <li><a href="http://wiki.parabolagnulinux.org/"
+ <li><a href="//wiki.parabolagnulinux.org/"
title="Community documentation">Wiki</a></li>
- <li><a href="http://wiki.parabolagnulinux.org/installation_guide"
+ <li><a href="//wiki.parabolagnulinux.org/installation_guide"
title="Parabola Installation Guide">Official Parabola Installation Guide</a></li>
- <li><a href="http://wiki.parabolagnulinux.org/Migration"
+ <li><a href="//wiki.parabolagnulinux.org/Migration"
title="Free your Arch instalation">Migration from Archlinux</a></li>
</ul>
<h4>Community</h4>
<ul>
- <li>
- <a href="http://list.parabolagnulinux.org/listinfo.cgi"
- title="Community and developer mailing lists">Mailing
- Lists</a>
- </li>
-
- <li>
- <a
- href="http://list.parabolagnulinux.org/pipermail/dev-parabolagnulinux.org/"
- title="dev mailing list archives">Dev Archives</a>
- </li>
-
- <li>
- <a href="http://wiki.parabolagnulinux.org/IRC_Channels"
- title="Official and regional IRC communities">IRC Channels</a>
- </li>
-
- <li>
- <a href="http://identi.ca/group/parabola" title="Parabola at
- identi.ca">Identi.ca group</a>
- </li>
+ <li><a href="//lists.parabolagnulinux.org/"
+ title="Community and developer mailing lists">Mailing Lists</a></li>
+ <li><a href="//lists.parabolagnulinux.org/pipermail/dev/"
+ title="dev mailing list archives">Dev Archives</a></li>
+ <li><a href="//wiki.parabolagnulinux.org/IRC_Channels"
+ title="Official and regional IRC communities">IRC Channels</a></li>
+ <li><a href="http://identi.ca/group/parabola" title="Parabola at identi.ca">Identi.ca group</a></li>
</ul>
<h4>Support</h4>
@@ -162,13 +148,15 @@
<h4>Development</h4>
<ul>
+ <li><a href="{% url page-keys %}"
+ title="Package/Database signing master keys">Master Keys</a></li>
<li><a href="/packages/"
title="View/search the package repository database">Packages</a></li>
<li><a href="/groups/"
title="View the available package groups">Package Groups</a></li>
- <li><a href="https://projects.parabolagnulinux.org"
+ <li><a href="//projects.parabolagnulinux.org"
title="Official Parabola projects (git)">Projects in Git</a></li>
- <li><a href="https://bugs.parabolagnulinux.org/"
+ <li><a href="//bugs.parabolagnulinux.org/"
title="Parabola's Issue Tracker">Issue Tracker</a></li>
<li><a href="/todolists/"
title="Hacker Todo Lists">Todo Lists</a></li>
@@ -180,7 +168,7 @@
<li><a href="{% url page-about %}"
title="Learn more about Parabola">About Parabola</a></li>
<li><a href="/download/" title="Get Parabola">Download Parabola</a></li>
- <li><a href="http://wiki.parabolagnulinux.org/Media"
+ <li><a href="//wiki.parabolagnulinux.org/Media"
title="Parabola in the media">Media Appearances</a></li>
<li><a href="{% url page-art %}" title="Parabola logos and other artwork for promotional use">Logos &amp; Artwork</a></li>
<li><a href="{% url news-list %}" title="News Archives">News Archives</a></li>
diff --git a/templates/public/keys.html b/templates/public/keys.html
new file mode 100644
index 00000000..2e7fcebe
--- /dev/null
+++ b/templates/public/keys.html
@@ -0,0 +1,57 @@
+{% extends "base.html" %}
+{% load pgp %}
+
+{% block title %}Parabola - Master Signing Keys{% endblock %}
+
+{% block content %}
+<div id="signing-keys" class="box">
+ <h2>Master Signing Keys</h2>
+
+    <p>This page lists the Parabola Master Keys. This is a distributed set of
+    keys that are seen as the "official" signing keys of the distribution. Each
+    key is held by one developer, and the corresponding revocation certificate
+    is held by a different developer. Thus, no single developer has absolute
+    control over any sort of root trust.</p>
+ <p>The {{ keys|length }} key{{ keys|pluralize }} listed below should be
+ regarded as the current set of master keys. They are available on public
+ keyservers and should be signed by the owner of the key.</p>
+    <p>All official Parabola developers and trusted users should have their
+    key signed by at least three of these master keys. This is in accordance
+    with the PGP <em>web of trust</em> concept. If a user is willing to
+    marginally trust all of the master keys, three signatures from different
+    master keys are enough for a given developer's key to be considered valid. For more
+ information on trust, please consult the
+ <a href="http://www.gnupg.org/gph/en/manual.html">GNU Privacy Handbook</a>
+ and <a href="http://www.gnupg.org/gph/en/manual.html#AEN385">Using trust to
+ validate keys</a>.</p>
+
+ <table class="pretty2">
+ <thead>
+ <tr>
+ <th>Master Key</th>
+ <th>Full Fingerprint</th>
+ <th>Owner</th>
+ <th>Owner's Signing Key</th>
+ <th>Revoker</th>
+ <th>Revoker's Signing Key</th>
+ </tr>
+ </thead>
+ <tbody>
+ {% for key in keys %}
+ <tr>
+ <td>{% pgp_key_link key.pgp_key %}</td>
+ <td>{{ key.pgp_key|pgp_fingerprint }}</td>
+ {% with key.owner.userprofile as owner_profile %}
+ <td><a href="{{ owner_profile.get_absolute_url }}">{{ key.owner.get_full_name }}</a></td>
+ <td>{% pgp_key_link owner_profile.pgp_key %}</td>
+ {% endwith %}
+ {% with key.revoker.userprofile as revoker_profile %}
+ <td><a href="{{ revoker_profile.get_absolute_url }}">{{ key.revoker.get_full_name }}</a></td>
+ <td>{% pgp_key_link revoker_profile.pgp_key %}</td>
+ {% endwith %}
+ </tr>
+ {% endfor %}
+ </tbody>
+ </table>
+</div>
+{% endblock %}
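
The three-signature rule described above mirrors GnuPG's default completion
criterion for marginally trusted keys. A toy sketch of the arithmetic,
assuming marginal trust in every master key (names and threshold handling are
illustrative):

    MARGINALS_NEEDED = 3  # GnuPG default for marginally trusted signers

    def key_is_valid(signing_master_keys):
        # a developer key becomes valid once enough distinct master
        # keys have signed it
        return len(set(signing_master_keys)) >= MARGINALS_NEEDED

    key_is_valid(['master-a', 'master-b', 'master-c'])  # True
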
diff --git a/templates/public/svn.html b/templates/public/svn.html
index 80367eaf..4321f7c2 100644
--- a/templates/public/svn.html
+++ b/templates/public/svn.html
@@ -4,7 +4,7 @@
<div class="box">
<h2 class="title">SVN Repositories</h2>
<p>Parabola doesn't use any SVN repositories. But you can find our <a
- href="https://projects.parabolagnulinux.org" title="Projects
+ href="//projects.parabolagnulinux.org" title="Projects
page">Projects</a> on git!
</p>
diff --git a/templates/public/userlist.html b/templates/public/userlist.html
index 66543191..cfb08d80 100644
--- a/templates/public/userlist.html
+++ b/templates/public/userlist.html
@@ -14,7 +14,6 @@
{% with users as dev_list %}
{% include 'public/developer_list.html' %}
{% endwith %}
-
</div>
{% endcache %}
{% endblock %}
diff --git a/templates/releng/add.html b/templates/releng/add.html
index 428812c9..402ceac6 100644
--- a/templates/releng/add.html
+++ b/templates/releng/add.html
@@ -6,17 +6,17 @@
<div class="box">
<h2>Parabola Releng Testbuild Feedback Entry</h2>
- <p>This page allows you to submit feedback after testing an Parabola
- installation using a release engineering testbuild. Mark all the
- options you used during the installation; at the end you can specify
- whether everything went OK. Be sure to only denote a successful
- install after having checked the installation properly. Some options
- require you to check several things (such as config files), this will
- be mentioned alongside the option.</p> <p>There is also an overview of
- all feedback on the <a href="{% url releng-test-overview %}">results
- page</a>. Once we have builds that are properly tested (enough
- successful feedback for all important features of the ISO or a
- slightly earlier ISO), we can release new official media.</p>
+    <p>This page allows you to submit feedback after testing a Parabola installation
+    using a release engineering testbuild. Mark all the options you used during the
+    installation; at the end you can specify whether everything went OK. Be
+    sure to only denote a successful install after having checked the
+    installation properly. Some options require you to check several things (such as
+    config files); this will be mentioned alongside the option.</p>
+ <p>There is also an overview of all feedback on the
+ <a href="{% url releng-test-overview %}">results page</a>. Once we have
+ builds that are properly tested (enough successful feedback for all
+ important features of the ISO or a slightly earlier ISO), we can release new
+ official media.</p>
<div id="releng-feedback"> <form action="" method="post">{% csrf_token %}
{{ form.as_p }}
diff --git a/templates/todolists/email_notification.txt b/templates/todolists/email_notification.txt
index 10b50f64..8b22b465 100644
--- a/templates/todolists/email_notification.txt
+++ b/templates/todolists/email_notification.txt
@@ -1,10 +1,11 @@
-{% autoescape off %}The todo list {{ todolist.name }} has had the following packages added to it for which you are a maintainer:
+{% autoescape off %}The todo list "{{ todolist.name }}" has had the following packages added to it for which you are a maintainer:
{% for tpkg in todo_packages %}
* {{ tpkg.pkg.repo.name|lower }}/{{ tpkg.pkg.pkgname }} ({{ tpkg.pkg.arch.name }}) - {{ tpkg.pkg.get_full_url }}{% endfor %}
Todo list information:
-Creator: {{todolist.creator.get_full_name}}
-Name: {{todolist.name}}
+Name: {{ todolist.name }}
+URL: {{ todolist.get_full_url }}
+Creator: {{ todolist.creator.get_full_name }}
Description:
-{{todolist.description|striptags|wordwrap:69}}{% endautoescape %}
+{{ todolist.description|striptags|wordwrap:78 }}{% endautoescape %}
diff --git a/templates/todolists/view.html b/templates/todolists/view.html
index 5a80684f..d48c362d 100644
--- a/templates/todolists/view.html
+++ b/templates/todolists/view.html
@@ -1,6 +1,4 @@
{% extends "base.html" %}
-{% load package_extras %}
-
{% block title %}Parabola - Todo: {{ list.name }}{% endblock %}
{% block content %}
@@ -29,7 +27,7 @@
<th>Name</th>
<th>Arch</th>
<th>Repo</th>
- <th>Maintainer</th>
+ <th>Maintainers</th>
<th>Status</th>
</tr>
</thead>
diff --git a/templates/visualize/index.html b/templates/visualize/index.html
new file mode 100644
index 00000000..99525e69
--- /dev/null
+++ b/templates/visualize/index.html
@@ -0,0 +1,43 @@
+{% extends "base.html" %}
+
+{% block title %}Parabola - Visualizations{% endblock %}
+
+{% block content %}
+<div class="box">
+
+ <h2>Visualizations of Packaging Data</h2>
+
+ <h3>Package Treemap</h3>
+
+ <div class="visualize-buttons">
+ <div>
+ <span>Scale Using:</span>
+ <button id="visualize-archrepo-count" class="visualize-archrepo-scaleby active">Package Count</button>
+ <button id="visualize-archrepo-flagged" class="visualize-archrepo-scaleby">Flagged</button>
+ <button id="visualize-archrepo-csize" class="visualize-archrepo-scaleby">Compressed Size</button>
+ <button id="visualize-archrepo-isize" class="visualize-archrepo-scaleby">Installed Size</button>
+ </div>
+ <div>
+ <span>Group By:</span>
+ <button id="visualize-archrepo-repo" class="visualize-archrepo-groupby active">Repository</button>
+ <button id="visualize-archrepo-arch" class="visualize-archrepo-groupby">Architecture</button>
+ </div>
+ </div>
+ <div id="visualize-archrepo" class="visualize-chart"></div>
+</div>
+
+{% load cdn %}{% jquery %}
+<script type="text/javascript" src="/media/d3.min.js"></script>
+<script type="text/javascript" src="/media/d3.layout.min.js"></script>
+<script type="text/javascript" src="/media/archweb.js"></script>
+<script type="text/javascript" src="/media/visualize.js"></script>
+<script type="text/javascript">
+$(document).ready(function() {
+ var orderings = {
+ "repo": { url: "{% url visualize-byrepo %}", color_attr: "repo" },
+ "arch": { url: "{% url visualize-byarch %}", color_attr: "arch" },
+ };
+ packages_treemap("#visualize-archrepo", orderings, "repo");
+});
+</script>
+{% endblock %}
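The orderings map hands packages_treemap() (defined in media/visualize.js, which this diff does not show) one JSON endpoint per grouping. Those endpoints are the by_repo/by_arch views added further down in visualize/views.py, so the payload looks roughly like the sketch below; the field names come from arch_repo_data(), while the concrete numbers are invented for illustration:

    # Shape of the {% url visualize-byrepo %} response (values invented):
    {
        "name": "Repositories",
        "data": [
            {"key": "core:", "name": "core", "arch": None, "repo": "core",
             "data": [
                 {"key": "core:i686", "name": "core (i686)",
                  "arch": "i686", "repo": "core",
                  "count": 210, "flagged": 4,
                  "csize": 123456789, "isize": 987654321},
             ]},
        ],
    }

Note that the scale-by button ids (count, flagged, csize, isize) match the keys of the leaf nodes, which is presumably how visualize.js selects the treemap's size attribute.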
diff --git a/todolists/views.py b/todolists/views.py
index a63516e8..233102cf 100644
--- a/todolists/views.py
+++ b/todolists/views.py
@@ -12,6 +12,7 @@ from django.template import Context, loader
from django.utils import simplejson
from main.models import Todolist, TodolistPkg, Package
+from packages.utils import attach_maintainers
from .utils import get_annotated_todolists
class TodoListForm(forms.ModelForm):
@@ -49,6 +50,9 @@ def flag(request, listid, pkgid):
@never_cache
def view(request, listid):
todolist = get_object_or_404(Todolist, id=listid)
+ # we don't hold onto the result, but the objects are the same here,
+ # so accessing maintainers in the template is now cheap
+ attach_maintainers(tp.pkg for tp in todolist.packages)
return direct_to_template(request, 'todolists/view.html', {'list': todolist})
@login_required
@@ -157,14 +161,16 @@ def send_todolist_emails(todo_list, new_packages):
template = loader.get_template('todolists/email_notification.txt')
send_mail('Packages added to todo list \'%s\'' % todo_list.name,
template.render(ctx),
- 'Parabola <packages@list.parabolagnulinux.org>',
+ 'Parabola <dev@lists.parabolagnulinux.org>',
[maint],
fail_silently=True)
def public_list(request):
todo_lists = Todolist.objects.incomplete()
+ # total hackjob, but it makes this a lot less query-intensive.
+ all_pkgs = [tp for tl in todo_lists for tp in tl.packages]
+ attach_maintainers([tp.pkg for tp in all_pkgs])
return direct_to_template(request, "todolists/public_list.html",
{"todo_lists": todo_lists})
-
# vim: set ts=4 sw=4 et:
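attach_maintainers() comes from packages/utils.py, which this merge does not touch, so only its effect is visible here: in both view() and public_list() it replaces a per-package maintainer lookup (one query per todo-list row) with a single bulk query whose results are cached on the Package objects themselves. A rough sketch of that pattern; the model, constant, and field names are assumptions for illustration, not the actual helper:

    from collections import defaultdict

    from packages.models import PackageRelation  # assumed location

    def attach_maintainers(packages):
        packages = list(packages)
        pkgbases = set(pkg.pkgbase for pkg in packages)
        # one query for every maintainer of every listed package
        relations = PackageRelation.objects.filter(
                type=PackageRelation.MAINTAINER,
                pkgbase__in=pkgbases).select_related('user')
        maintainers = defaultdict(list)
        for rel in relations:
            maintainers[rel.pkgbase].append(rel.user)
        for pkg in packages:
            # cached on the instance; the template's Maintainers
            # column now costs no extra queries
            pkg.maintainers = maintainers[pkg.pkgbase]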
diff --git a/urls.py b/urls.py
index 27632dfc..64931968 100644
--- a/urls.py
+++ b/urls.py
@@ -1,20 +1,24 @@
import os.path
-from django.conf.urls.defaults import *
+# Stupid Django. Don't remove these "unused" handler imports
+from django.conf.urls.defaults import handler500, handler404, include, patterns
from django.conf import settings
from django.contrib import admin
from django.views.generic import TemplateView
+from django.views.decorators.cache import cache_page
+from django.views.i18n import null_javascript_catalog
from feeds import PackageFeed, NewsFeed
import sitemaps
-sitemaps = {
+our_sitemaps = {
'base': sitemaps.BaseSitemap,
'news': sitemaps.NewsSitemap,
'packages': sitemaps.PackagesSitemap,
'package-files': sitemaps.PackageFilesSitemap,
'package-groups': sitemaps.PackageGroupsSitemap,
+ 'split-packages': sitemaps.SplitPackagesSitemap,
}
admin.autodiscover()
@@ -33,10 +37,12 @@ feeds_patterns = patterns('',
# Sitemaps
urlpatterns += patterns('django.contrib.sitemaps.views',
+ # Thanks Django, we can't cache these longer because of
+ # https://code.djangoproject.com/ticket/2713
(r'^sitemap.xml$', 'index',
- {'sitemaps': sitemaps}),
+ {'sitemaps': our_sitemaps}),
(r'^sitemap-(?P<section>.+)\.xml$', 'sitemap',
- {'sitemaps': sitemaps}),
+ {'sitemaps': our_sitemaps}),
)
# Authentication / Admin
@@ -58,15 +64,17 @@ urlpatterns += patterns('public.views',
{}, 'page-art'),
(r'^svn/$', TemplateView.as_view(template_name='public/svn.html'),
{}, 'page-svn'),
- (r'^hackers/$', 'userlist', { 'type':'hackers' }, 'page-devs'),
- (r'^fellows/$', 'userlist', { 'type':'fellows' }, 'page-fellows'),
+ (r'^hackers/$', 'userlist', { 'user_type':'hackers' }, 'page-devs'),
+ (r'^fellows/$', 'userlist', { 'user_type':'fellows' }, 'page-fellows'),
(r'^donate/$', 'donate', {}, 'page-donate'),
(r'^download/$', 'download', {}, 'page-download'),
+ (r'^master-keys/$', 'keys', {}, 'page-keys'),
)
# Includes and other remaining stuff
urlpatterns += patterns('',
- (r'^jsi18n/$', 'django.views.i18n.null_javascript_catalog'),
+ # cache this static JS resource for 1 week rather than default 5 minutes
+ (r'^jsi18n/$', cache_page(604800)(null_javascript_catalog)),
(r'^admin/', include(admin.site.urls)),
(r'^devel/', include('devel.urls')),
(r'^feeds/', include(feeds_patterns)),
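The sitemaps → our_sitemaps rename is more than style: the module is imported as sitemaps, and the old code immediately rebound that name to a dict. The dict literal evaluates before the rebinding, so it happened to work, but the module became unreachable afterwards:

    import sitemaps

    # the right-hand side is evaluated first, so this line alone worked...
    sitemaps = {'base': sitemaps.BaseSitemap}

    # ...but the name now points at the dict, not the module, so any
    # later attribute access on the module breaks:
    sitemaps.NewsSitemap   # AttributeError: 'dict' object has no attribute

The jsi18n change uses the same decorator-as-function idiom, cache_page(604800)(null_javascript_catalog), to apply the one-week cache policy at the URLconf rather than on the view itself.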
diff --git a/visualize/__init__.py b/visualize/__init__.py
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/visualize/__init__.py
diff --git a/visualize/models.py b/visualize/models.py
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/visualize/models.py
diff --git a/visualize/tests.py b/visualize/tests.py
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/visualize/tests.py
diff --git a/visualize/urls.py b/visualize/urls.py
new file mode 100644
index 00000000..57ee0626
--- /dev/null
+++ b/visualize/urls.py
@@ -0,0 +1,9 @@
+from django.conf.urls.defaults import patterns
+
+urlpatterns = patterns('visualize.views',
+ (r'^$', 'index', {}, 'visualize-index'),
+ (r'^by_arch/$', 'by_arch', {}, 'visualize-byarch'),
+ (r'^by_repo/$', 'by_repo', {}, 'visualize-byrepo'),
+)
+
+# vim: set ts=4 sw=4 et:
diff --git a/visualize/views.py b/visualize/views.py
new file mode 100644
index 00000000..f2b1d63b
--- /dev/null
+++ b/visualize/views.py
@@ -0,0 +1,69 @@
+from django.db.models import Count, Sum
+from django.http import HttpResponse
+from django.utils import simplejson
+from django.views.decorators.cache import cache_page
+from django.views.generic.simple import direct_to_template
+
+from main.models import Package, Arch, Repo
+
+def index(request):
+ return direct_to_template(request, 'visualize/index.html', {})
+
+def arch_repo_data():
+ qs = Package.objects.select_related().values(
+ 'arch__name', 'repo__name').annotate(
+ count=Count('pk'), csize=Sum('compressed_size'),
+ isize=Sum('installed_size'),
+ flagged=Count('flag_date')).order_by()
+ arches = Arch.objects.values_list('name', flat=True)
+ repos = Repo.objects.values_list('name', flat=True)
+
+ def build_map(name, arch, repo):
+ key = '%s:%s' % (repo or '', arch or '')
+ return {
+ 'key': key,
+ 'name': name,
+ 'arch': arch,
+ 'repo': repo,
+ 'data': [],
+ }
+
+ # now transform these results into two mappings: one ordered (repo, arch),
+ # and one ordered (arch, repo).
+ arch_groups = dict((a, build_map(a, a, None)) for a in arches)
+ repo_groups = dict((r, build_map(r, None, r)) for r in repos)
+ for row in qs:
+ arch = row['arch__name']
+ repo = row['repo__name']
+ values = {
+ 'arch': arch,
+ 'repo': repo,
+ 'name': '%s (%s)' % (repo, arch),
+ 'key': '%s:%s' % (repo, arch),
+ 'csize': row['csize'],
+ 'isize': row['isize'],
+ 'count': row['count'],
+ 'flagged': row['flagged'],
+ }
+ arch_groups[arch]['data'].append(values)
+ repo_groups[repo]['data'].append(values)
+
+ data = {
+ 'by_arch': { 'name': 'Architectures', 'data': arch_groups.values() },
+ 'by_repo': { 'name': 'Repositories', 'data': repo_groups.values() },
+ }
+ return data
+
+@cache_page(1800)
+def by_arch(request):
+ data = arch_repo_data()
+ to_json = simplejson.dumps(data['by_arch'], ensure_ascii=False)
+ return HttpResponse(to_json, mimetype='application/json')
+
+@cache_page(1800)
+def by_repo(request):
+ data = arch_repo_data()
+ to_json = simplejson.dumps(data['by_repo'], ensure_ascii=False)
+ return HttpResponse(to_json, mimetype='application/json')
+
+# vim: set ts=4 sw=4 et:
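arch_repo_data() does all its work in one grouped query: the trailing empty .order_by() strips the model's default ordering, which Django would otherwise add to the GROUP BY and thereby widen the grouping, and Count('flag_date') counts only rows with a non-NULL flag date, i.e. flagged packages. A quick way to poke at the result from a Django shell, assuming the app is mounted at /visualize/ (the include() for it is not among the hunks shown above):

    from django.test.client import Client
    from django.utils import simplejson

    resp = Client().get('/visualize/by_repo/')   # path is an assumption
    data = simplejson.loads(resp.content)

    # one leaf per (repo, arch) pair; summing the counts recovers the
    # total number of packages
    total = sum(leaf['count']
                for group in data['data']
                for leaf in group['data'])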