Commit 2d374bc9 authored by Thomas Gambier

slapos cli: add slapos cachelookup commands

This commit removes the following 2 commands:
 * slapos cache lookup
 * slapos cache source
And introduces the following 3 commands:
 * slapos cachelookup binary-sr
 * slapos cachelookup url
 * slapos cachelookup pypi

This is useful to check whether the objects buildout needs are already present in shacache.
Please note that the commands use the latest keys (introduced in
nexedi/slapos.buildout!26)
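
For reference, a minimal sketch of the cache keys each new command queries, as implemented in the code changed by this merge request; the software release URL comes from the updated documentation below, while the source URL and the egg name/version are purely illustrative::

    import hashlib

    # slapos cachelookup binary-sr: the key is the plain MD5 of the software release URL.
    software_url = 'https://lab.nexedi.com/nexedi/slapos/raw/1.0.232/software/kvm/software.cfg'
    binary_sr_key = hashlib.md5(software_url.encode()).hexdigest()

    # slapos cachelookup url: the key is the MD5 of the source URL, prefixed with 'file-urlmd5:'.
    source_url = 'https://example.org/foo-1.0.tar.gz'  # illustrative only
    url_key = 'file-urlmd5:' + hashlib.md5(source_url.encode()).hexdigest()

    # slapos cachelookup pypi: the key is 'pypi:<name>=<version>'.
    pypi_key = 'pypi:{}={}'.format('lxml', '4.9.1')  # illustrative name/version

    print(binary_sr_key, url_key, pypi_key, sep='\n')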

See merge request nexedi/slapos.core!376
parent 9860df52
 Changes
 =======
 
+1.7.6 (unreleased yet)
+------------------
+
+ * cachelookup: introduce slapos cachelookup {url, binary-sr, pypi} commands
+ * cache: remove "slapos cache {lookup, source}" commands
+
 1.7.5 (2022-03-21)
 ------------------
  * slapgrid: fix invocation of bootstrapBuildout
......
@@ -407,28 +407,29 @@ Go to the SlapOS Master web page, click ``Account``, then ``Token``.
 A token is valid for a single ``configure client`` command and will expire after one day.
 
-cache lookup
-~~~~~~~~~~~~
+cachelookup binary-sr
+~~~~~~~~~~~~~~~~~~~~~
 
-.. program-output:: python slapos help cache lookup
+.. program-output:: python slapos help cachelookup binary-sr
 
 Examples
 
-* See if the wordpress Software Release is available in precompiled format for our distribution::
+* See if the KVM Software Release is available in precompiled format for our distribution::
 
-    $ slapos cache lookup http://git.erp5.org/gitweb/slapos.git/blob_plain/refs/tags/slapos-0.156:/software/kvm/software.cfg
-    Software URL: http://git.erp5.org/gitweb/slapos.git/blob_plain/refs/tags/slapos-0.156:/software/kvm/software.cfg
-    MD5: 4410088e11f370503e9d78db4cfa4ec4
-    -------------
-    Available for:
-    distribution     | version | id            | compatible?
-    -----------------+---------+---------------+-------------
-    CentOS           | 6.3     | Final         | no
-    Fedora           | 17      | Beefy Miracle | no
-    Ubuntu           | 12.04   | precise       | yes
-    debian           | 6.0.6   |               | no
-    debian           | 7.0     |               | no
+    $ slapos cachelookup binary-sr https://lab.nexedi.com/nexedi/slapos/raw/1.0.232/software/kvm/software.cfg
+    Software URL: https://lab.nexedi.com/nexedi/slapos/raw/1.0.232/software/kvm/software.cfg
+    MD5: 248e006f4d007ca7060b16c9b0cb6bc8
+    --------------------------------------------------------------------
+    multiarch         distribution  version  id      compatible?  verified?
+    --------------------------------------------------------------------
+    x86_64-linux-gnu  Ubuntu        18.04    bionic  no           yes
+    x86_64-linux-gnu  Ubuntu        20.04    focal   no           yes
+    x86_64-linux-gnu  debian        10.9             no           yes
+    x86_64-linux-gnu  debian        11.2             yes          yes
+    x86_64-linux-gnu  debian        8.11             no           yes
+    x86_64-linux-gnu  debian        9.13             no           yes
+    --------------------------------------------------------------------
 
 You can also use the corresponding hash value in place of the URL.
......
@@ -96,8 +96,9 @@ setup(name=name,
         ],
         'slapos.cli': [
           # Utilities
-          'cache lookup = slapos.cli.cache:CacheLookupCommand',
-          'cache source = slapos.cli.cache_source:CacheLookupCommand',
+          'cachelookup binary-sr = slapos.cli.cache_binarysr:CacheLookupCommand',
+          'cachelookup url = slapos.cli.cache_url:CacheLookupCommand',
+          'cachelookup pypi = slapos.cli.cache_pypi:CacheLookupCommand',
           'complete = slapos.cli.complete:CompleteCommand',
           # SlapOS Node commands
           'node bang = slapos.cli.bang:BangCommand',
......
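
The three commands plug into the CLI through the ``slapos.cli`` entry point group shown above. A small, hedged sketch to check that they are registered after installation (``pkg_resources`` is just one way to inspect entry points)::

    import pkg_resources

    # List every cachelookup command registered in the 'slapos.cli' group.
    for ep in pkg_resources.iter_entry_points('slapos.cli'):
      if ep.name.startswith('cachelookup'):
        print(ep.name, '->', ep.module_name)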
@@ -38,8 +38,6 @@ from slapos.grid import networkcache
 from slapos.cli.config import ConfigCommand
 from slapos.util import str2bytes
 
-FAILURE_EXIT_CODE = 10
-
 class CacheLookupCommand(ConfigCommand):
   """
   perform a query to the networkcache
@@ -50,6 +48,7 @@ class CacheLookupCommand(ConfigCommand):
   cache of the software release, and which ones are compatible
   with the OS you are currently running.
   """
+  command_group = 'cachelookup'
 
   def get_parser(self, prog_name):
     ap = super(CacheLookupCommand, self).get_parser(prog_name)
@@ -88,11 +87,11 @@ def do_lookup(logger, cache_dir, cache_url, signature_certificate_list,
   md5 = hashlib.md5(str2bytes(software_url)).hexdigest()
   try:
     entries = networkcache.download_entry_list(cache_url, cache_dir,
-      md5, logger, signature_certificate_list, software_url)
+      md5, logger, signature_certificate_list)
   except Exception:
     logger.critical('Error while looking object %s', software_url,
                     exc_info=True)
-    return FAILURE_EXIT_CODE
+    return 1
   if not entries:
     logger.info('Object found in cache, but has no binary entries.')
......
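
For completeness, a hedged sketch of what the binary-sr lookup does programmatically, relying on the ``download_entry_list`` signature as changed at the end of this merge request; the shacache/shadir URLs are placeholders, not the project's official endpoints::

    import hashlib
    import logging

    from slapos.grid import networkcache
    from slapos.util import str2bytes

    logging.basicConfig(level=logging.INFO)
    logger = logging.getLogger('cachelookup-sketch')

    software_url = 'https://lab.nexedi.com/nexedi/slapos/raw/1.0.232/software/kvm/software.cfg'
    md5 = hashlib.md5(str2bytes(software_url)).hexdigest()

    # First argument is download-cache-url, second is download-dir-url (placeholders here),
    # and None means no signature certificate checking.
    entries = networkcache.download_entry_list(
      'https://shacache.example.org', 'https://shadir.example.org',
      md5, logger, None)
    print(len(entries or []), 'binary entries found')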
# -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (c) 2010-2014 Vifib SARL and Contributors.
# All Rights Reserved.
#
# WARNING: This program as such is intended to be used by professional
# programmers who take the whole responsibility of assessing all potential
# consequences resulting from its eventual inadequacies and bugs
# End users who are looking for a ready-to-use solution with commercial
# guarantees and support are strongly adviced to contract a Free Software
# Service Company
#
# This program is Free Software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public License
# as published by the Free Software Foundation; either version 2.1
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
##############################################################################
import ast
import hashlib
import json
import re
import requests
import sys
import prettytable
from six.moves.urllib.error import HTTPError
from slapos.grid import networkcache
from slapos.cli.config import ConfigCommand
from slapos.cli.command import resetLogger
from slapos.util import str2bytes
class CacheLookupCommand(ConfigCommand):
  """
  perform a query to the networkcache.
  Check if python package is available to be downloaded from cache.
  """
  command_group = 'cachelookup'

  def get_parser(self, prog_name):
    ap = super(CacheLookupCommand, self).get_parser(prog_name)
    ap.add_argument('name',
                    help='python package name')
    ap.add_argument('version',
                    help='python package version')
    return ap

  def take_action(self, args):
    configp = self.fetch_config(args)
    cache_dir = configp.get('networkcache', 'download-dir-url')
    cache_url = configp.get('networkcache', 'download-cache-url')
    signature_certificate_list = configp.get('networkcache', 'signature-certificate-list')
    sys.exit(
      do_lookup(self.app.log, cache_dir, cache_url,
                signature_certificate_list, args.name, args.version))


def do_lookup(logger, cache_dir, cache_url, signature_certificate_list,
              name, version):
  key = 'pypi:{}={}'.format(name, version)
  try:
    entries = networkcache.download_entry_list(cache_url, cache_dir,
                                               key, logger, signature_certificate_list)
    if not entries:
      logger.info('Object found in cache, but has no entry.')
      return 0
    pt = prettytable.PrettyTable(['basename', 'sha512', 'signed'])
    for entry in entries:
      d = json.loads(entry[0])
      pt.add_row([d["basename"], d["sha512"], entry[1]])
    logger.info('Python egg %s version %s', name, version)
    logger.info('SHADIR URL: %s/%s\n', cache_dir, key)
    resetLogger(logger)
    for line in pt.get_string(border=True, padding_width=0, vrules=prettytable.NONE).split('\n'):
      logger.info(line)
  except HTTPError as e:
    if e.code == 404:
      logger.info('Object not found in cache.')
    else:
      logger.info('Problem to connect to shacache.')
    return 1
  except Exception:
    logger.critical('Error while looking egg %s version %s', name, version,
                    exc_info=True)
    return 1
  return 0
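
A hedged usage sketch for the new pypi command: ``do_lookup`` is invoked the same way ``take_action`` does it above, with placeholder cache URLs, no signing certificates, and an illustrative egg name/version::

    import logging

    from slapos.cli.cache_pypi import do_lookup

    logging.basicConfig(level=logging.INFO)
    logger = logging.getLogger('cachelookup-pypi-sketch')

    # Arguments: logger, download-dir-url, download-cache-url,
    # signature-certificate-list, package name, package version.
    exit_code = do_lookup(logger,
                          'https://shadir.example.org',    # placeholder download-dir-url
                          'https://shacache.example.org',  # placeholder download-cache-url
                          None, 'setuptools', '44.1.1')    # illustrative name/version
    print('exit code:', exit_code)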
@@ -33,9 +33,10 @@ import json
 import re
 import requests
 import sys
 
 import prettytable
 
+from six.moves.urllib.error import HTTPError
 from slapos.grid import networkcache
 from slapos.cli.config import ConfigCommand
 from slapos.cli.command import resetLogger
@@ -47,6 +48,7 @@ class CacheLookupCommand(ConfigCommand):
   Check if source URL is available to be downloaded from cache.
   """
+  command_group = 'cachelookup'
 
   def get_parser(self, prog_name):
     ap = super(CacheLookupCommand, self).get_parser(prog_name)
@@ -56,43 +58,45 @@ class CacheLookupCommand(ConfigCommand):
   def take_action(self, args):
     configp = self.fetch_config(args)
-    cache_dir = configp.get('networkcache', 'download-cache-url')
-    sys.exit(do_lookup(self.app.log, cache_dir, args.url))
+    cache_dir = configp.get('networkcache', 'download-dir-url')
+    cache_url = configp.get('networkcache', 'download-cache-url')
+    signature_certificate_list = configp.get('networkcache', 'signature-certificate-list')
+    sys.exit(
+      do_lookup(self.app.log, cache_dir, cache_url,
+                signature_certificate_list, args.url))
 
 
-def do_lookup(logger, cache_dir, url):
-  md5 = hashlib.md5(str2bytes(url)).hexdigest()
+def do_lookup(logger, cache_dir, cache_url, signature_certificate_list,
+              url):
+  key = 'file-urlmd5:' + hashlib.md5(url.encode()).hexdigest()
   try:
-    cached_url = '%s/slapos-buildout-%s' % (cache_dir, md5)
-    logger.debug('Connecting to %s', url)
-    req = requests.get(cached_url, timeout=5)
-  except (requests.Timeout, requests.ConnectionError):
-    logger.critical('Cannot connect to cache server at %s', cached_url)
-    return 10
-  if not req.ok:
-    if req.status_code == 404:
-      logger.critical('Object not in cache: %s', url)
-    else:
-      logger.critical('Error while looking object %s: %s', url, req.reason)
-    return 10
-  entries = req.json()
-  if not entries:
-    logger.info('Object found in cache, but has no entries.')
-    return 0
-  pt = prettytable.PrettyTable(['file', 'sha512'])
-  entry_list = sorted(json.loads(entry[0]) for entry in entries)
-  for entry in entry_list:
-    pt.add_row([entry["file"], entry["sha512"]])
-  meta = json.loads(entries[0][0])
-  logger.info('Software source URL: %s', url)
-  logger.info('SHADIR URL: %s', cached_url)
-  resetLogger(logger)
-  for line in pt.get_string(border=True, padding_width=0, vrules=prettytable.NONE).split('\n'):
-    logger.info(line)
+    entries = networkcache.download_entry_list(cache_url, cache_dir,
+                                               key, logger, signature_certificate_list)
+    if not entries:
+      logger.info('Object found in cache, but has no entry.')
+      return 0
+    pt = prettytable.PrettyTable(['url', 'sha512', 'signed'])
+    for entry in entries:
+      d = json.loads(entry[0])
+      pt.add_row([d["url"], d["sha512"], entry[1]])
+    logger.info('Software source URL: %s', url)
+    logger.info('SHADIR URL: %s/%s\n', cache_dir, key)
+    resetLogger(logger)
+    for line in pt.get_string(border=True, padding_width=0, vrules=prettytable.NONE).split('\n'):
+      logger.info(line)
+  except HTTPError as e:
+    if e.code == 404:
+      logger.info('Object not found in cache.')
+    else:
+      logger.info('Problem to connect to shacache.')
+    return 1
+  except Exception:
+    logger.critical('Error while looking object %s', url,
+                    exc_info=True)
+    return 1
   return 0
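
The new commands all read the same three options from the ``[networkcache]`` section of the slapos client configuration, as ``take_action`` above shows. A minimal sketch with placeholder values (the real URLs come from your own configuration)::

    import configparser  # Python 3 stdlib

    config = configparser.ConfigParser()
    config['networkcache'] = {
      'download-cache-url': 'https://shacache.example.org',  # placeholder
      'download-dir-url': 'https://shadir.example.org',      # placeholder
      'signature-certificate-list': '',
    }

    for option in ('download-dir-url', 'download-cache-url', 'signature-certificate-list'):
      print(option, '=', repr(config.get('networkcache', option)))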
@@ -76,7 +76,7 @@ def loadJsonEntry(jentry):
 def download_entry_list(cache_url, dir_url, key, logger,
-                        signature_certificate_list, software_url):
+                        signature_certificate_list):
   nc = NetworkcacheClient(cache_url, dir_url,
                           signature_certificate_list=signature_certificate_list or None)
   entry_list = nc.select_generic(key, filter=False)
......