Commit 332f6462 authored by Łukasz Nowak

Re-merge bug_778748.

Revert "Revert "Merge branch 'bug_778748'""

This reverts commit 57e5a60d.
parent 818082c4
@@ -27,4 +27,3 @@ defaults =
     '-t',
     '!(bootstrap|selectingpython|selecting-python)',
     ]
@@ -82,7 +82,7 @@ setup(
     packages = ['zc', 'zc.buildout'],
     package_dir = {'': 'src'},
     namespace_packages = ['zc'],
-    install_requires = 'setuptools',
+    install_requires = ['setuptools'],
     include_package_data = True,
     entry_points = entry_points,
     extras_require = dict(test=['zope.testing']),
...
@@ -362,6 +362,8 @@ class Buildout(UserDict.DictMixin):
             newest=self.newest,
             allow_hosts=self._allow_hosts,
             prefer_final=not self.accept_buildout_test_releases,
+            sha_cache=options.get('sha-cache'),
+            sha_dir=options.get('sha-dir'),
             )
         # Now copy buildout and setuptools eggs, and record destination eggs:
@@ -860,6 +862,8 @@ class Buildout(UserDict.DictMixin):
             path = [options['develop-eggs-directory']],
             allow_hosts = self._allow_hosts,
             prefer_final=not self.accept_buildout_test_releases,
+            sha_cache=options.get('sha-cache'),
+            sha_dir=options.get('sha-dir'),
             )
         upgraded = []
@@ -1087,7 +1091,9 @@ def _install_and_load(spec, group, entry, buildout):
                 working_set=pkg_resources.working_set,
                 newest=buildout.newest,
                 allow_hosts=buildout._allow_hosts,
-                prefer_final=not buildout.accept_buildout_test_releases)
+                prefer_final=not buildout.accept_buildout_test_releases,
+                sha_dir=buildout_options.get('sha-dir'),
+                sha_cache=buildout_options.get('sha-cache'))

         __doing__ = 'Loading %s recipe entry %s:%s.', group, spec, entry
         return pkg_resources.load_entry_point(
...
@@ -17,7 +17,6 @@ try:
     from hashlib import md5
 except ImportError:
     from md5 import new as md5
-from zc.buildout.easy_install import realpath
 import logging
 import os
 import os.path
@@ -72,6 +71,8 @@ class Download(object):
         self.fallback = fallback
         self.hash_name = hash_name
         self.logger = logger or logging.getLogger('zc.buildout')
+        self.sha_dir = options.get('sha-dir')
+        self.sha_cache = options.get('sha-cache')

     @property
     def download_cache(self):
@@ -177,20 +178,25 @@ class Download(object):
         self.logger.info('Downloading %s' % url)
         urllib._urlopener = url_opener
         handle, tmp_path = tempfile.mkstemp(prefix='buildout-')
+        try:
             try:
+                if not download_network_cached(self.sha_dir, self.sha_cache,
+                        tmp_path, url, self.logger, md5sum):
+                    # Download from original url
                     tmp_path, headers = urllib.urlretrieve(url, tmp_path)
                     if not check_md5sum(tmp_path, md5sum):
                         raise ChecksumError(
                             'MD5 checksum mismatch downloading %r' % url)
-            except IOError, e:
-                os.remove(tmp_path)
-                raise zc.buildout.UserError("Error downloading extends for URL "
-                                            "%s: %r" % (url, e[1:3]))
-            except Exception, e:
-                os.remove(tmp_path)
-                raise
+                    # Upload the file to networkcached.
+                    upload_network_cached(self.sha_cache, self.sha_dir, url,
+                        tmp_path, self.logger)
             finally:
                 os.close(handle)
+        except:
+            os.remove(tmp_path)
+            raise

         if path:
             shutil.move(tmp_path, path)
@@ -251,6 +257,9 @@ def remove(path):
     if os.path.exists(path):
         os.remove(path)

+from zc.buildout.networkcache import download_network_cached, \
+    upload_network_cached
+from zc.buildout.easy_install import realpath

 def locate_at(source, dest):
     if dest is None or realpath(dest) == realpath(source):
...
@@ -41,6 +41,12 @@ _oprp = getattr(os.path, 'realpath', lambda path: path)
 def realpath(path):
     return os.path.normcase(os.path.abspath(_oprp(path)))

+from zc.buildout.networkcache import get_filename_from_url, \
+    upload_network_cached, \
+    download_network_cached
+
 default_index_url = os.environ.get(
     'buildout-testing-index-url',
     'http://pypi.python.org/simple',
@@ -335,6 +341,8 @@ class Installer:
                  include_site_packages=None,
                  allowed_eggs_from_site_packages=None,
                  prefer_final=None,
+                 sha_dir=None,
+                 sha_cache=None
                  ):
         self._dest = dest
         self._allow_hosts = allow_hosts
@@ -403,6 +411,9 @@ class Installer:
         if versions is not None:
             self._versions = versions
+        self._sha_dir = sha_dir
+        self._sha_cache = sha_cache

     _allowed_eggs_from_site_packages_regex = None

     def allow_site_package_egg(self, name):
         if (not self._include_site_packages or
@@ -702,7 +713,14 @@
             ):
             return dist

+        filename = get_filename_from_url(dist.location)
+        new_location = os.path.join(tmp, filename)
+        if not download_network_cached(self._sha_dir, self._sha_cache,
+                new_location, dist.location, logger):
             new_location = self._index.download(dist.location, tmp)
+            upload_network_cached(self._sha_cache, self._sha_dir,
+                dist.location, new_location, logger)
+
         if (download_cache
             and (realpath(new_location) == realpath(dist.location))
             and os.path.isfile(new_location)
@@ -1080,13 +1098,13 @@ def install(specs, dest,
             path=None, working_set=None, newest=True, versions=None,
             use_dependency_links=None, allow_hosts=('*',),
             include_site_packages=None, allowed_eggs_from_site_packages=None,
-            prefer_final=None):
+            prefer_final=None, sha_dir=None, sha_cache=None):
     installer = Installer(
         dest, links, index, executable, always_unzip, path, newest,
         versions, use_dependency_links, allow_hosts=allow_hosts,
         include_site_packages=include_site_packages,
         allowed_eggs_from_site_packages=allowed_eggs_from_site_packages,
-        prefer_final=prefer_final)
+        prefer_final=prefer_final, sha_dir=sha_dir, sha_cache=sha_cache)
     return installer.install(specs, working_set)
...
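
With install() now accepting sha_dir and sha_cache (see the hunk above), a
caller can hand the cache endpoints straight through to the Installer. A
minimal, hypothetical call; the URLs, paths and requirement string below are
placeholders, only the sha_cache/sha_dir keywords come from this commit:

    import zc.buildout.easy_install

    ws = zc.buildout.easy_install.install(
        ['demo ==0.2'],                      # requested distributions
        '/path/to/eggs',                     # destination eggs directory
        links=['http://localhost/links'],    # placeholder find-links
        index='http://pypi.python.org/simple',
        sha_cache='http://127.0.0.1:5001/',  # placeholder shacache endpoint
        sha_dir='http://127.0.0.1:5002/',    # placeholder shadir endpoint
        )
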
##############################################################################
#
# Copyright (c) 2010 ViFiB SARL and Contributors.
# All Rights Reserved.
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE.
#
##############################################################################

import hashlib
import os
import posixpath
import re
import urllib
import urlparse

_md5_re = re.compile(r'md5=([a-f0-9]+)')

def _get_md5_from_url(url):
    match = _md5_re.search(url)
    if match:
        return match.group(1)
    return None

def get_directory_key(url):
    """Return the directory key derived from the URL.

    Check whether the URL belongs to PyPI:
      - if it does, the directory key is pypi-buildout-<urlmd5>
      - if it does not, the directory key is slapos-buildout-<urlmd5>
    """
    urlmd5 = hashlib.md5(url).hexdigest()
    if 'pypi' in url:
        return 'pypi-buildout-%s' % urlmd5
    return 'slapos-buildout-%s' % urlmd5
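
# Illustrative example (hypothetical URL): for
# http://pypi.python.org/packages/source/d/demo/demo-0.2.tar.gz the key would
# be 'pypi-buildout-' + hashlib.md5(url).hexdigest(); any non-PyPI URL yields
# 'slapos-buildout-' + hashlib.md5(url).hexdigest().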

def download_network_cached(sha_dir, sha_cache, path, url, logger, md5sum=None):
    """Download from a network cache provider.

    If anything fails (the provider is offline, the hash check fails, ...),
    the network-cached file is ignored.

    Return True if the download succeeded.
    """
    try:
        from slapos.libnetworkcache import NetworkcacheClient, UploadError, \
            DirectoryNotFound
    except ImportError:
        return False

    if sha_cache in (None, '',):
        # Not able to use the network cache.
        return False

    if md5sum is None:
        md5sum = _get_md5_from_url(url)

    directory_key = get_directory_key(url)
    file_name = os.path.basename(url)

    logger.info('Downloading %s from network cache.' % file_name)
    try:
        nc = NetworkcacheClient(shacache=sha_cache, shadir=sha_dir)
        file_content = nc.select(directory_key)

        f = open(path, 'w+b')
        try:
            f.seek(0)
            f.write(file_content)
        finally:
            f.close()

        if not check_md5sum(path, md5sum):
            logger.info('MD5 checksum mismatch downloading %s' % file_name)
            return False
    except (IOError, DirectoryNotFound), e:
        logger.info('Failed to download %s from network cache: %s'
                    % (file_name, str(e)))
        return False
    return True

def upload_network_cached(sha_cache, sha_dir, external_url, path, logger):
    """Upload the file to a shacache server."""
    try:
        from slapos.libnetworkcache import NetworkcacheClient, UploadError, \
            DirectoryNotFound
    except ImportError:
        return False

    if sha_cache in [None, ''] or sha_dir in [None, '']:
        logger.debug(
            'Upload to cache ignored, shacache or shadir was not provided!')
        return False

    file_name = external_url.split('/')[-1].split('#')[0].split('?')[0]
    directory_key = get_directory_key(external_url)
    kw = dict(file=file_name,
              urlmd5=hashlib.md5(external_url).hexdigest())

    f = open(path, 'rb')
    try:
        nc = NetworkcacheClient(shacache=sha_cache,
                                shadir=sha_dir)
        nc.upload(f, directory_key, **kw)
    except (IOError, UploadError), e:
        logger.info('Failed to upload file: %s' % str(e))
        return False
    finally:
        f.close()
    return True

def get_filename_from_url(url):
    """Inspired by how pip extracts the filename from a URL."""
    url = url.split('#', 1)[0]
    url = url.split('?', 1)[0]
    url = url.rstrip('/')
    name = posixpath.basename(url)
    assert name, (
        'URL %r produced no filename' % url)
    return name
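
# Imported at the end of the module, presumably to avoid the circular import
# with zc.buildout.download, which itself imports helpers from this module.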
from download import check_md5sum
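
Taken together, the two helpers wrap a plain download in a cache-first,
upload-afterwards pattern, which is what the Download.download() change above
does. A minimal sketch of that pattern, assuming only what this module
defines; the endpoint URLs and the fetch() helper are illustrative, not part
of the commit:

    import logging
    import urllib

    from zc.buildout.networkcache import (
        download_network_cached, upload_network_cached)

    logger = logging.getLogger('zc.buildout')
    SHA_CACHE = 'http://127.0.0.1:5001/'  # placeholder shacache endpoint
    SHA_DIR = 'http://127.0.0.1:5002/'    # placeholder shadir endpoint

    def fetch(url, path, md5sum=None):
        # Try the cache first; on a miss, download directly and then seed
        # the cache so the next run can be served from it.
        if not download_network_cached(SHA_DIR, SHA_CACHE, path, url,
                                       logger, md5sum):
            urllib.urlretrieve(url, path)
            upload_network_cached(SHA_CACHE, SHA_DIR, url, path, logger)
        return path
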
Using buildout with network-cache
=================================

NOTE: You need to install networkcached on your system to run this test;
otherwise it will not produce the expected results.

Buildout itself must be able to cache everything it downloads into an
external, universal cache server.

This feature is not enabled by default; you enable it by defining the
``network-cache`` option.

>>> write(sample_buildout, 'buildout.cfg',
... '''
... [buildout]
... network-cache = http://127.0.0.1:5001/
... find-links = %(link_server)s
... parts = eggs
...
... [eggs]
... recipe = zc.recipe.egg:eggs
... eggs = demo ==0.2
... ''' % globals())

>>> print system(buildout)
Installing eggs.
Getting distribution for 'demo==0.2'.
The url is not cached yet: http://localhost/demo-0.2-py2.6.egg
Got demo 0.2.
Getting distribution for 'demoneeded'.
The url is not cached yet: http://localhost/demoneeded-1.2c1.zip
Got demoneeded 1.2c1.
...

>>> from glob import glob
>>> from os.path import join
>>> for egg in glob(join(sample_buildout, 'eggs', 'demoneeded*.egg')):
...     remove(sample_buildout, 'eggs', egg)

>>> print system(buildout)
Updating eggs.
Getting distribution for 'demoneeded'.
Downloading from network cache http://127.0.0.1:5001/105e4a9ba3f9c7f45b46cc8c47ac2d2ca6440ae564538090c6d390a8959dd03e
Got demoneeded 1.2c1.
...

@@ -4053,7 +4053,7 @@ def test_suite():
             ),
         doctest.DocFileSuite(
-            'download.txt', 'extends-cache.txt',
+            'download.txt', 'extends-cache.txt', 'networkcache.txt',
             setUp=easy_install_SetUp,
             tearDown=zc.buildout.testing.buildoutTearDown,
             optionflags=doctest.NORMALIZE_WHITESPACE | doctest.ELLIPSIS,
...
@@ -88,6 +88,11 @@ class Eggs(object):
         kw = {}
         if 'unzip' in options:
             kw['always_unzip'] = options.query_bool('unzip', None)
+        if 'sha-cache' in b_options:
+            kw['sha_cache'] = b_options.get('sha-cache')
+        if 'sha-dir' in b_options:
+            kw['sha_dir'] = b_options.get('sha-dir')
+
         ws = zc.buildout.easy_install.install(
             distributions, options['eggs-directory'],
             links=self.links,
...