Commit ac8463ad authored by Julien Muchembled

More refactoring and bugfixes

- Deprecate slapos.networkcachehelper
- Common internal method to query network cache, to reduce code duplication
  and fix downloading through SSL.
- Do not upload data that is already in SHACACHE. This partially reverts commit
  7bb5e112, since it's not possible to hash on the fly. The 'tempfile' module is
  reimported for non-seekable streams.
- New way to instantiate NetworkcacheClient, again to reduce code duplication,
  but also to use a single instance for both upload & download (see the sketch
  below). The old way is kept for compatibility until it is no longer used.
parent a2ee9fa6
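A minimal usage sketch of the single-instance workflow described above (not part of the commit): it assumes the constructor still accepts the shacache and shadir URLs as before, and that upload() and select() behave as in the tests shown in this diff; the URLs and the directory key are placeholders.

# Hedged sketch: one NetworkcacheClient instance reused for both upload
# and download. Constructor keywords and the upload()/select() calls are
# assumed from the tests visible in this commit, not from the full new API.
from StringIO import StringIO
from slapos.libnetworkcache import NetworkcacheClient

nc = NetworkcacheClient(shacache='http://127.0.0.1:9000/shacache',
                        shadir='http://127.0.0.1:9000/shadir')
# Upload some content; per this commit, data already present in SHACACHE
# is not uploaded again.
nc.upload(StringIO('some reproducible build artifact'))
# Download through the same instance; select() returns a file-like object.
data = nc.select('some-directory-key').read()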
This diff is collapsed.
@@ -79,6 +79,7 @@ class NCHandler(BaseHTTPServer.BaseHTTPRequestHandler):
  def do_POST(self):
    assert 'shacache' in self.path
    assert self.headers.getheader('content-type') == 'application/octet-stream'
    path = os.path.abspath(os.path.join(self.tree, *self.path.split('/')))
    if not os.path.exists(path):
      os.makedirs(path)
@@ -86,6 +87,9 @@ class NCHandler(BaseHTTPServer.BaseHTTPRequestHandler):
    cksum = hashlib.sha512(data).hexdigest()
    path = os.path.join(path, cksum)
    # Although a real server would accept the request,
    # clients should avoid uploading the same content twice.
    assert not os.path.exists(path)
    open(path, 'wb').write(data)
    self.send_response(201)
    self.send_header('Content-Length', str(len(cksum)))
@@ -152,32 +156,6 @@ class OfflineTest(unittest.TestCase):
        self.shadir_url)
    self.assertRaises(IOError, nc.upload, StringIO())

  def test_init_method_normal_http_url(self):
    """
      Check if the init method is setting the attributes correctly.
    """
    nc = slapos.libnetworkcache.NetworkcacheClient(shacache=self.shacache_url,
        shadir=self.shadir_url)
    self.assertEquals({'Content-Type': 'application/json'}, \
        nc.shacache_header_dict)
    self.assertEquals(self.host, nc.shacache_host)
    self.assertEquals(self.shacache_path, nc.shacache_path)
    self.assertEquals(self.port, nc.shacache_port)
    self.assertEquals(self.shacache_url, nc.shacache_url)
    self.assertEquals({'Content-Type': 'application/json'}, \
        nc.shadir_header_dict)
    self.assertEquals(self.host, nc.shadir_host)
    self.assertEquals(self.shadir_path, nc.shadir_path)
    self.assertEquals(self.port, nc.shadir_port)

  def test_init_backward_compatible(self):
    """Checks that invocation with minimal parameter works fine"""
    nc = slapos.libnetworkcache.NetworkcacheClient(shacache=self.shacache_url,
        shadir=self.shadir_url)
    self.assertEqual(nc.shacache_url, self.shacache_url)
    self.assertTrue(nc.shadir_host in self.shadir_url)


class OnlineMixin:

  handler = NCHandler
@@ -562,38 +540,6 @@ class OnlineTest(OnlineMixin, unittest.TestCase):
    selected = signed_nc.select(key).read()
    self.assertEqual(selected, self.test_string)

  def test_shacache_key_cert_accepted(self):
    key_file = tempfile.NamedTemporaryFile()
    key_file.write(self.key)
    key_file.flush()
    certificate_file = tempfile.NamedTemporaryFile()
    certificate_file.write(self.certificate)
    certificate_file.flush()
    nc = slapos.libnetworkcache.NetworkcacheClient(self.shacache, self.shadir,
        shacache_cert_file=certificate_file, shacache_key_file=key_file)
    # simplified assertion, as no http authentication server is available
    self.assertEqual(nc.shacache_cert_file, certificate_file)
    self.assertEqual(nc.shacache_key_file, key_file)

  def test_shadir_key_cert_accepted(self):
    key_file = tempfile.NamedTemporaryFile()
    key_file.write(self.auth_key)
    key_file.flush()
    certificate_file = tempfile.NamedTemporaryFile()
    certificate_file.write(self.auth_certificate)
    certificate_file.flush()
    # simplified assertion, as no http authentication server is available
    nc = slapos.libnetworkcache.NetworkcacheClient(self.shadir, self.shadir,
        shadir_cert_file=certificate_file, shadir_key_file=key_file)
    # simplified assertion, as no http authentication server is available
    self.assertEqual(nc.shadir_cert_file, certificate_file)
    self.assertEqual(nc.shadir_key_file, key_file)

  @unittest.skipUnless(os.environ.get('TEST_SHA_CACHE', '') != '',
      "Requires standalone test server")
@@ -12,11 +12,13 @@
#
##############################################################################
# BBB: Deprecated. This file is ugly and must disappear.
# DO NOT EXTEND IT. Add methods to NetworkcacheClient class instead.
import json
import logging
import os
import shutil
import tarfile
import urllib2
from slapos.libnetworkcache import NetworkcacheClient, UploadError, \
  DirectoryNotFound
@@ -25,19 +27,7 @@ logging.basicConfig()
logger = logging.getLogger('networkcachehelper')
logger.setLevel(logging.INFO)
def _split_last_directory(path):
  """
  If basename(path) is a file (i.e /path/to/directory), do a simple split.
  If basename(path) is a directory (i.e /path/to/directory/), split again to
  have pair like ('/path/to', 'directory').
  """
  path_dirname, path_basename = os.path.split(path)
  if not path_basename:
    # We were given a path like "/path/to/directory/": Split again.
    path_dirname, path_basename = os.path.split(path_dirname)
  return path_dirname, path_basename
def helper_upload_network_cached(dir_url, cache_url,
def __upload_network_cached(dir_url, cache_url,
    file_descriptor, directory_key,
    signature_private_key_file, shacache_cert_file, shacache_key_file,
    shadir_cert_file, shadir_key_file, metadata_dict={}):
@@ -83,28 +73,9 @@ def helper_upload_network_cached(dir_url, cache_url,
  except (IOError, UploadError), e:
    logger.info('Failed to upload file. %s' % str(e))
    return False
  return True
def helper_upload_network_cached_from_file(dir_url, cache_url,
    path, directory_key, metadata_dict,
    signature_private_key_file, shacache_cert_file, shacache_key_file,
    shadir_cert_file, shadir_key_file):
  """
  Upload an existing file, using a file_descriptor.
  """
  file_descriptor = open(path, 'r')
  return helper_upload_network_cached(
      dir_url=dir_url,
      cache_url=cache_url,
      file_descriptor=file_descriptor,
      directory_key=directory_key,
      signature_private_key_file=signature_private_key_file,
      shacache_cert_file=shacache_cert_file,
      shacache_key_file=shacache_key_file,
      shadir_cert_file=shadir_cert_file,
      shadir_key_file=shadir_key_file,
      metadata_dict=metadata_dict,
  )
# BBB: slapos.buildout (1.6.0-dev-SlapOS-011) imports it without using it
helper_upload_network_cached_from_file = None
def helper_upload_network_cached_from_directory(dir_url, cache_url,
    path, directory_key, metadata_dict,
@@ -113,25 +84,10 @@ def helper_upload_network_cached_from_directory(dir_url, cache_url,
"""
Create a tar from a given directory (path) then upload it to networkcache.
"""
  # Create tar file. Don't create it to /tmp dir as it can be too small.
  path_dirname, path_basename = _split_last_directory(path)
  tarpath = os.path.join(path_dirname, '%s.tar' % path_basename)
  tar = tarfile.open(tarpath, "w:gz")
  try:
    try:
      tar.add(path, arcname=path_basename)
    finally:
      tar.close()
    # Upload it
    result = helper_upload_network_cached_from_file(dir_url, cache_url,
        tarpath, directory_key, metadata_dict,
  return __upload_network_cached(dir_url, cache_url,
      NetworkcacheClient.archive(path.rstrip(os.sep)), directory_key,
      signature_private_key_file, shacache_cert_file, shacache_key_file,
      shadir_cert_file, shadir_key_file)
  finally:
    # Always clean it
    if os.path.exists(tarpath):
      os.remove(tarpath)
  return result
      shadir_cert_file, shadir_key_file, metadata_dict)
def helper_download_network_cached(dir_url, cache_url,
@@ -257,26 +213,13 @@ def helper_download_network_cached_to_directory(dir_url, cache_url,
"""
Download a tar file from network cache and untar it to specified path.
"""
  # Download tar file. Don't download to /tmp dir as it can be too small.
  path_dirname, path_basename = _split_last_directory(path)
  tarpath = os.path.join(path_dirname, '%s.tar' % path_basename)
  try:
    metadata_dict = helper_download_network_cached_to_file(
        dir_url, cache_url,
  result = helper_download_network_cached(dir_url, cache_url,
      signature_certificate_list,
        directory_key, tarpath, wanted_metadata_dict, required_key_list,
        strategy)
    if metadata_dict:
      # Untar it to path
      tar = tarfile.open(tarpath)
      directory_key, wanted_metadata_dict, required_key_list, strategy)
  if result:
    file_descriptor, metadata_dict = result
    try:
      logger.info("Extracting downloaded archive from cache...")
        tar.extractall(path=os.path.dirname(path))
      finally:
        tar.close()
  finally:
    # Always clean it
    if os.path.exists(tarpath):
      os.remove(tarpath)
      NetworkcacheClient.extract(path.rstrip('/'), file_descriptor)
      return metadata_dict
    finally:
      file_descriptor.close()