Add --json flag to print download information #5400

Closed
wants to merge 1 commit

1 change: 1 addition & 0 deletions news/5398.feature
@@ -0,0 +1 @@
Add machine readable --json to pip download and add --log-stderr
21 changes: 17 additions & 4 deletions src/pip/_internal/basecommand.py
@@ -135,7 +135,9 @@ def main(self, args):
logger_class = "pip._internal.utils.logging.ColorizedStreamHandler"
handler_class = "pip._internal.utils.logging.BetterRotatingFileHandler"

logging.config.dictConfig({
stdout, stderr = self.log_streams

config = {
"version": 1,
"disable_existing_loggers": False,
"filters": {
@@ -155,15 +157,15 @@ def main(self, args):
"level": level,
"class": logger_class,
"no_color": options.no_color,
"stream": self.log_streams[0],
"stream": stderr if options.log_stderr else stdout,
"filters": ["exclude_warnings"],
"formatter": "indent",
},
"console_errors": {
"level": "WARNING",
"class": logger_class,
"no_color": options.no_color,
"stream": self.log_streams[1],
"stream": stderr,
"formatter": "indent",
},
"user_log": {
@@ -173,6 +175,13 @@ def main(self, args):
"delay": True,
"formatter": "indent",
},
"structured_output": {
"level": "DEBUG",
"class": logger_class,
"no_color": True,
"stream": stdout,
"formatter": "indent",
},
},
"root": {
"level": root_level,
@@ -194,7 +203,11 @@ def main(self, args):
"pip._vendor", "distlib", "requests", "urllib3"
]
},
})
}
config["loggers"]["pip.__structured_output"] = {
"handlers": ["structured_output"],
}
logging.config.dictConfig(config)

# TODO: try to get these passing down from the command?
# without resorting to os.environ to hold these.
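
For readers who want to see the logging change in isolation: the hunk above registers a dedicated `pip.__structured_output` logger whose handler always writes to stdout, while the ordinary console handler moves to stderr when `--log-stderr` is passed. A minimal standalone sketch of that routing (the handler and logger names mirror the diff; everything else is simplified and illustrative):

```python
import json
import logging
import logging.config
import sys

# Sketch only: two handlers, one per stream, mirroring the patched dictConfig.
logging.config.dictConfig({
    "version": 1,
    "disable_existing_loggers": False,
    "handlers": {
        "console": {
            "class": "logging.StreamHandler",
            "stream": sys.stderr,   # what --log-stderr selects for console output
        },
        "structured_output": {
            "class": "logging.StreamHandler",
            "stream": sys.stdout,   # reserved for machine-readable output
        },
    },
    "root": {"level": "INFO", "handlers": ["console"]},
    "loggers": {
        "pip.__structured_output": {"handlers": ["structured_output"]},
    },
})

logging.getLogger(__name__).info("ordinary progress message")  # -> stderr

# The JSON line goes to stdout via its own handler; it also propagates to the
# root console handler, but that handler now points at stderr, so stdout stays
# parseable as JSON.
logging.getLogger("pip.__structured_output").info(
    json.dumps([{"name": "example", "version": "1.0"}]))
```
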
10 changes: 10 additions & 0 deletions src/pip/_internal/cmdoptions.py
@@ -274,6 +274,15 @@ def extra_index_url():
help='Ignore package index (only looking at --find-links URLs instead).',
) # type: Any

log_stderr = partial(
Option,
'--log-stderr',
dest='log_stderr',
action='store_true',
default=False,
help="Log logger warnings to stderr",
)


def find_links():
return Option(
@@ -604,6 +613,7 @@ def _merge_hash(option, opt_str, value, parser):
no_cache,
disable_pip_version_check,
no_color,
log_stderr,
]
}

36 changes: 36 additions & 0 deletions src/pip/_internal/commands/download.py
@@ -1,5 +1,6 @@
from __future__ import absolute_import

import json
import logging
import os

@@ -115,6 +116,15 @@ def __init__(self, *args, **kw):
"this option."),
)

cmd_opts.add_option(
'--json',
dest='json',
action='store_true',
default=False,
help=("Output information about downloaded packages as json. "
"See documentation for caveats."),
)

index_opts = cmdoptions.make_option_group(
cmdoptions.index_group,
self.parser,
@@ -227,8 +237,34 @@ def run(self, options, args):
if downloaded:
logger.info('Successfully downloaded %s', downloaded)

if options.json:
details = self._get_download_details(
resolver, requirement_set, options.download_dir)
logging.getLogger('pip.__structured_output').info(
json.dumps(details))

# Clean up
if not options.no_clean:
requirement_set.cleanup_files()

return requirement_set

def _get_download_details(self, resolver, requirement_set, download_dir):
downloaded = []
download_dir = os.path.abspath(download_dir)
for req in requirement_set.successfully_downloaded:
deps = resolver.get_dependencies().get(req.name, [])
download_path = os.path.join(download_dir, req.link.filename)
downloaded.append(
{
'name': req.name,
'download_path': download_path,
'url': req.link.url,
'version': req.version,
'dependencies': [
{'name': dep.name, 'version': dep.version}
for dep in deps
],
}
)
return downloaded
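
A hypothetical consumer sketch, assuming a pip build that includes this patch is installed in the current environment; the record fields (`name`, `version`, `url`, `download_path`, `dependencies`) mirror `_get_download_details` above. Pairing `--json` with `--log-stderr` matters here: it keeps progress messages off stdout so the output parses as a single JSON document.

```python
import json
import subprocess
import sys

# Hypothetical consumer; requires a pip that includes this patch.
proc = subprocess.run(
    [sys.executable, "-m", "pip", "download", "flake8==3.5.0",
     "-d", ".", "--json", "--log-stderr"],
    stdout=subprocess.PIPE, stderr=subprocess.PIPE, check=True,
)

# stdout is a JSON list with one record per downloaded package.
for package in json.loads(proc.stdout.decode("utf-8")):
    deps = ", ".join(dep["name"] for dep in package["dependencies"]) or "none"
    print("{name}=={version} -> {download_path} (deps: {deps})".format(
        deps=deps, **package))
```
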
4 changes: 2 additions & 2 deletions src/pip/_internal/index.py
@@ -486,7 +486,7 @@ def find_requirement(self, req, upgrade):
"""Try to find a Link matching req

Expects req, an InstallRequirement and upgrade, a boolean
Returns a Link if found,
Returns an InstallationCandidate if found,
Raises DistributionNotFound or BestVersionAlreadyInstalled otherwise
"""
all_candidates = self.find_all_candidates(req.name)
@@ -579,7 +579,7 @@ def find_requirement(self, req, upgrade):
best_candidate.version,
', '.join(sorted(compatible_versions, key=parse_version))
)
return best_candidate.location
return best_candidate

def _get_pages(self, locations, project_name):
"""
29 changes: 27 additions & 2 deletions src/pip/_internal/req/req_install.py
@@ -27,7 +27,9 @@
from pip._internal.download import (
is_archive_file, is_url, path_to_url, url_to_path,
)
from pip._internal.exceptions import InstallationError, UninstallationError
from pip._internal.exceptions import (
InstallationError, InvalidWheelFilename, UninstallationError,
)
from pip._internal.locations import (
PIP_DELETE_MARKER_FILENAME, running_under_virtualenv,
)
@@ -304,7 +306,9 @@ def populate_link(self, finder, upgrade, require_hashes):
to file modification times.
"""
if self.link is None:
self.link = finder.find_requirement(self, upgrade)
candidate = finder.find_requirement(self, upgrade)
self.link = candidate.location
self._found_version = candidate.version
if self._wheel_cache is not None and not require_hashes:
old_link = self.link
self.link = self._wheel_cache.get(self.link, self.name)
@@ -325,6 +329,27 @@ def is_pinned(self):
return (len(specifiers) == 1 and
next(iter(specifiers)).operator in {'==', '==='})

_found_version = None

@property
def version(self):
""" The version if available, else None

The version determined during requirement resolution if available,
the wheel version from the filename if available, or None if no
version information could be obtained
"""
if self._found_version:
return str(self._found_version)
# If we didn't lookup the version from the internet/a finder, try to
# guess it
if self.is_wheel:
try:
return Wheel(self.link.filename).version
except InvalidWheelFilename:
pass
return None

def from_path(self):
if self.req is None:
return None
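
The `version` property above falls back to parsing the wheel filename through pip's internal `Wheel` class when the resolver did not record a version. A rough standalone equivalent of that fallback, using a deliberately simplified pattern (not a complete PEP 427 parser) rather than pip internals:

```python
import re

# Simplified stand-in for the wheel-filename fallback; pip itself relies on
# its internal Wheel class, as shown in the diff above.
WHEEL_RE = re.compile(r"^(?P<name>.+?)-(?P<version>[^-]+)-.+\.whl$")


def version_from_wheel_filename(filename):
    """Return the version embedded in a wheel filename, or None."""
    match = WHEEL_RE.match(filename)
    return match.group("version") if match else None


print(version_from_wheel_filename("flake8-3.5.0-py2.py3-none-any.whl"))  # 3.5.0
print(version_from_wheel_filename("not-a-wheel.txt"))                    # None
```
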
7 changes: 7 additions & 0 deletions src/pip/_internal/resolve.py
@@ -352,3 +352,10 @@ def schedule(req):
for install_req in req_set.requirements.values():
schedule(install_req)
return order

def get_dependencies(self):
""" Gets dependencies discovered after resolution

Returns a mapping of package names to lists of requirement objects
"""
return self._discovered_dependencies
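
For illustration, the mapping returned by `get_dependencies()` maps a package name to the `InstallRequirement` objects discovered while resolving it; this is the structure that `_get_download_details` in `download.py` flattens into each record's `dependencies` list. A toy stand-in (the class and the versions shown are hypothetical; only the shape matters):

```python
# Toy stand-in for the mapping returned by get_dependencies():
# package name -> requirements discovered while resolving that package.
class FakeRequirement(object):
    def __init__(self, name, version):
        self.name = name
        self.version = version


discovered_dependencies = {
    "flake8": [
        FakeRequirement("pyflakes", "1.6.0"),
        FakeRequirement("pycodestyle", "2.3.1"),
        FakeRequirement("mccabe", "0.6.1"),
    ],
}

# The same fold performed by _get_download_details() above.
deps = discovered_dependencies.get("flake8", [])
print([{"name": d.name, "version": d.version} for d in deps])
```
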
79 changes: 79 additions & 0 deletions tests/functional/test_download.py
@@ -1,7 +1,10 @@
import json
import os
import sys
import textwrap

import pytest
from pip._vendor.six.moves.urllib.parse import urlparse

from pip._internal.status_codes import ERROR
from tests.lib.path import Path
@@ -88,6 +91,82 @@ def test_basic_download_should_download_dependencies(script):
assert script.site_packages / 'openid' not in result.files_created


@pytest.mark.network
def test_prints_json(script):
result = script.pip(
'download', 'flake8==3.5.0', '-d', '.', '--log-stderr', '--json',
expect_stderr=True
)

expected = {
'flake8': {
'dependencies': [
'pyflakes',
'enum34',
'configparser',
'pycodestyle',
'mccabe',
],
'filename': 'flake8-{version}-py2.py3-none-any.whl'
},
'pyflakes': {
'dependencies': [],
'filename': 'pyflakes-{version}-py2.py3-none-any.whl'
},
'pycodestyle': {
'dependencies': [],
'filename': 'pycodestyle-{version}-py2.py3-none-any.whl'
},
'mccabe': {
'dependencies': [],
'filename': 'mccabe-{version}-py2.py3-none-any.whl'
},
'configparser': {
'dependencies': [],
'filename': 'configparser-{version}.tar.gz',
},
'enum34': {
'dependencies': [],
'filename': 'enum34-{version}-py{py_version}-none-any.whl',
},
}
expected_keys = ['dependencies', 'download_path', 'name', 'url', 'version']

actual = json.loads(result.stdout)
transformed = {package['name']: package for package in actual}

assert 'flake8' in transformed
assert 'pyflakes' in transformed

for package in actual:
assert sorted(package.keys()) == expected_keys

expected_package = expected[package['name']]
version = package['version']
url = urlparse(package['url'])
filename = expected_package['filename'].format(
version=version, py_version=sys.version_info[0])

created = result.files_created[Path('scratch') / filename]
created_path = os.path.join(created.base_path, created.path)

# Windows likes to spit this path out lowercase.
assert package['download_path'].lower() == created_path.lower()
assert url.scheme == 'https'
assert url.hostname == 'files.pythonhosted.org'
assert Path(url.path).name == filename
if package['dependencies']:
# Dependencies can change between python versions. Just try to get
# /something/ matched
assert any(
(dep['name'] in expected for dep in package['dependencies']))
for dep in package['dependencies']:
assert sorted(dep.keys()) == ['name', 'version']
if dep['name'] in expected:
expected_version = transformed[dep['name']]['version']
assert dep['version'] == expected_version


def test_download_wheel_archive(script, data):
"""
It should download a wheel archive path