Commit 547ce36a authored by Romain Courteaud

Add Gitlab resiliency test for theia

See merge request nexedi/slapos!1322
parents a113f183 831999cf
......@@ -14,7 +14,7 @@
# not need these here).
[instance.cfg]
filename = instance.cfg.in
md5sum = 8e8edd7dadb9c331fdba826312e7e8d2
md5sum = 8fabec3053c6453a0a4078ba89592da7
[watcher]
_update_hash_filename_ = watcher.in
......@@ -54,16 +54,12 @@ md5sum = 0f1ec4077dab586cc003ae13f689eda2
[instance-gitlab.cfg.in]
_update_hash_filename_ = instance-gitlab.cfg.in
md5sum = 0b023c7efd027f65b14e752484be2ec7
md5sum = 0445e54ee7ce1f65ec79801e128c80d4
[instance-gitlab-export.cfg.in]
_update_hash_filename_ = instance-gitlab-export.cfg.in
md5sum = 9ed8220bb3ad71ff7e8638354127412c
[instance-gitlab-test.cfg.in]
_update_hash_filename_ = instance-gitlab-test.cfg.in
md5sum = 7ba08928e6a8998ec8ed1bb97851b726
[macrolib.cfg.in]
_update_hash_filename_ = macrolib.cfg.in
md5sum = a56a44e96f65f5ed20211bb6a54279f4
......
[buildout]
extends =
{{ instance_gitlab_export_cfg }}
parts +=
install-demo-backup
[root-password]
passwd = root1234
[service-unicorn]
environment =
GITLAB_ROOT_PASSWORD=${root-password:passwd}
[service-postgresql]
pgdata-directory = ${directory:srv}/pg
[gitlab-workhorse-dir]
recipe = slapos.cookbook:mkdirectory
srv = ${directory:srv}/glab-wh
[gitlab-workhorse]
srv = ${gitlab-workhorse-dir:srv}
socket = ${gitlab-workhorse:srv}/wh.socket
[unicorn]
socket = ${:srv}/unc.socket
[publish-instance-info]
password = ${root-password:passwd}
# token for default.user user in gitlab demo backup
# Edit this token if needed
private-token = SLurtnxPscPsU-SDm4oN
# raw URL for latest commit on setup.py in gitlab demo backup.
latest-file-uri = ${:backend_url}/open/slapos/raw/94c96d42c22e16836dadddac7c8061f4a8c6ca7a/setup.py
[instance-parameter]
# backup more often, 10 minutes seems to be the minimum
configuration.backup_frequency = */10 * * * *
[install-demo-backup]
recipe = plone.recipe.command
stop-on-error = false
backup-done = ${directory:var}/backup.ready
command =
if [ -f "${:backup-done}" ]; then
echo "Demo backup installed."
else
rm -rf ${secrets:secrets} ${directory:var}/backup/* &&
mkdir -p ${secrets:secrets} ${directory:var}/tmp &&
cp -r {{ gitlab_demo_backup_path }}/secrets/* ${secrets:secrets} &&
cp -rf {{ gitlab_demo_backup_path }}/backup-gitlab.git/ ${directory:var}/tmp &&
cd ${directory:var}/tmp/backup-gitlab.git/
PATH=${directory:bin}:{{ gopath_bin }}:{{ git_location }}/bin:$PATH
gitlab-backup restore -vupok -go HEAD &&
touch ${:backup-done}
fi
update-command = ${:command}
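The restore command above is guarded by a marker file (backup.ready) so that plone.recipe.command, whose update-command re-runs it on every buildout run, restores the demo backup only once. A minimal sketch of the same run-once pattern in Python, with hypothetical paths standing in for the buildout substitutions:

import os
import subprocess

BACKUP_DONE = '/srv/example/var/backup.ready'  # hypothetical marker path

def install_demo_backup():
    # Skip the restore when the marker file already exists.
    if os.path.exists(BACKUP_DONE):
        print("Demo backup installed.")
        return
    # Otherwise restore the demo backup, then drop the marker so the
    # next run becomes a no-op.
    subprocess.check_call(['gitlab-backup', 'restore', '-vupok', '-go', 'HEAD'])
    open(BACKUP_DONE, 'w').close()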
......@@ -37,6 +37,7 @@ parts =
logrotate-entry-cron
on-reinstantiate
gitaly-socket-listening-promise
# std stuff for slapos instance
eggs-directory = {{ eggs_directory }}
......@@ -174,12 +175,18 @@ internal = ${:sockets}/internal
log = ${directory:log}/gitaly
[gitaly]
socket = ${gitaly-dir:sockets}/gitaly.socket
socket = ${directory:var}/gitaly.socket
log = ${gitaly-dir:log}
location = {{ gitaly_location }}
pid = ${directory:run}/gitaly.pid
internal_socket = ${gitaly-dir:internal}
[gitaly-socket-listening-promise]
<= monitor-promise-base
promise = check_socket_listening
name = gitaly_socket_listening_promise.py
config-pathname = ${gitaly:socket}
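The promise reuses the generic check_socket_listening plugin and points it at the gitaly socket via config-pathname. As a rough illustration only (not the actual slapos.toolbox plugin), checking that a UNIX socket accepts connections can look like this:

import socket
import sys

def is_unix_socket_listening(pathname):
    # connect() on an AF_UNIX socket succeeds only if a server is accepting on it.
    s = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
    try:
        s.connect(pathname)
        return True
    except OSError:
        return False
    finally:
        s.close()

if __name__ == '__main__':
    # e.g. the path substituted for ${gitaly:socket}
    sys.exit(0 if is_unix_socket_listening(sys.argv[1]) else 1)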
# 2. configuration files
[etc-template]
recipe = slapos.recipe.template:jinja2
......@@ -535,9 +542,9 @@ srv = ${directory:srv}/gitlab-workhorse
log = ${directory:log}/workhorse
[gitlab-workhorse]
srv = ${gitlab-workhorse-dir:srv}
socket = ${gitlab-workhorse:srv}/gitlab-workhorse.socket
log = ${gitlab-workhorse-dir:log}/gitlab-workhorse.log
srv = ${directory:srv}
socket = ${directory:srv}/gitlab-workhorse.socket
log = ${directory:log}/gitlab-workhorse.log
secret = ${secrets:secrets}/gitlab_workhorse_secret
[service-gitlab-workhorse]
......@@ -588,7 +595,7 @@ log = ${directory:log}/unicorn
[unicorn]
srv = ${unicorn-dir:srv}
log = ${unicorn-dir:log}
socket = ${:srv}/unicorn.socket
socket = ${directory:srv}/unicorn.socket
[service-unicorn]
recipe = slapos.cookbook:wrapper
......
......@@ -128,11 +128,3 @@ url = ${instance-gitlab.cfg.in:target}
url = ${instance-gitlab-export.cfg.in:target}
context-extra =
raw instance_gitlab_cfg $${instance-gitlab.cfg:output}
[instance-gitlab-test.cfg]
<= instance-cfg
url = ${instance-gitlab-test.cfg.in:target}
context-extra =
raw instance_gitlab_cfg $${instance-gitlab.cfg:output}
raw instance_gitlab_export_cfg $${instance-gitlab-export.cfg:output}
raw gitlab_demo_backup_path ${gitlab-demo-backup.git:location}
......@@ -135,7 +135,7 @@ git-executable = ${git:location}/bin/git
<= git-repository
repository = https://lab.nexedi.com/nexedi/gitlab-ce.git
# 9.5.10 + NXD patches:
revision = v9.5.10-9-g69b0ffae00bf
revision = v8.16.0.pre-16286-g9d66cd7b834
location = ${buildout:parts-directory}/gitlab
[gitlab-shell-repository]
......@@ -370,9 +370,6 @@ destination = ${buildout:directory}/${:_buildout_section_name_}
[instance-gitlab-export.cfg.in]
<= download-file
[instance-gitlab-test.cfg.in]
<= download-file
[macrolib.cfg.in]
<= download-file
......
......@@ -33,8 +33,9 @@ import re
import subprocess
import time
import unittest
import shutil
import requests
import tempfile
from datetime import datetime, timedelta
from six.moves.urllib.parse import urljoin
......@@ -53,6 +54,9 @@ erp5_software_release_url = os.path.abspath(
peertube_software_release_url = os.path.abspath(
os.path.join(
os.path.dirname(__file__), '..', '..', 'peertube', 'software.cfg'))
gitlab_software_release_url = os.path.abspath(
os.path.join(
os.path.dirname(__file__), '..', '..', 'gitlab', 'software.cfg'))
def setUpModule():
......@@ -443,3 +447,180 @@ class TestTheiaResiliencePeertube(test_resiliency.TestTheiaResilience):
partition = self._getPeertubePartition(servicename)
return self.getPartitionPath(
instance_type, 'srv', 'runner', 'instance', partition, *paths)
class TestTheiaResilienceGitlab(test_resiliency.TestTheiaResilience):
test_instance_max_retries = 12
backup_max_tries = 480
backup_wait_interval = 60
_connection_parameters_regex = re.compile(r"{.*}", re.DOTALL)
_test_software_url = gitlab_software_release_url
def setUp(self):
self.temp_dir = os.path.realpath(tempfile.mkdtemp())
self.temp_clone_dir = os.path.realpath(tempfile.mkdtemp())
self.addCleanup(shutil.rmtree, self.temp_dir)
self.addCleanup(shutil.rmtree, self.temp_clone_dir)
def _getGitlabConnectionParameters(self, instance_type='export'):
out = self.captureSlapos(
'request', 'test_instance', self._test_software_url,
stderr=subprocess.STDOUT,
text=True,
)
print(out)
return json.loads(self._connection_parameters_regex.search(out).group(0).replace("'", '"'))
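The slapos CLI prints the connection parameters as a Python-style dict, so the regex extracts the first {...} block and the single quotes are swapped for double quotes before json.loads. A small standalone sketch of that parsing, using a made-up sample of CLI output (the real output may differ):

import json
import re

_connection_parameters_regex = re.compile(r"{.*}", re.DOTALL)

# Made-up sample of `slapos request` output; only its shape matters here.
sample_output = """Requesting test_instance...
{'backend_url': 'http://[2001:db8::1]:7777', 'password': 'root1234'}
"""

match = _connection_parameters_regex.search(sample_output)
# The dict repr uses single quotes; swap them so it parses as JSON.
params = json.loads(match.group(0).replace("'", '"'))
print(params['backend_url'])  # http://[2001:db8::1]:7777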
def test_twice(self):
# do nothing
pass
def _prepareExport(self):
super(TestTheiaResilienceGitlab, self)._prepareExport()
gitlab_partition = self._getGitlabPartitionPath('export', 'gitlab')
gitlab_rails_bin = os.path.join(gitlab_partition, 'bin', 'gitlab-rails')
os.chdir(self.temp_dir)
# Get Gitlab parameters
parameter_dict = self._getGitlabConnectionParameters()
backend_url = parameter_dict['backend_url']
print('Trying to connect to gitlab backend URL...')
response = requests.get(backend_url, verify=False)
self.assertEqual(requests.codes['OK'], response.status_code)
# Set the password and token
output = subprocess.check_output(
(gitlab_rails_bin, 'runner', "user = User.find(1); user.password = 'nexedi4321'; user.password_confirmation = 'nexedi4321'; user.save!"),
universal_newlines=True)
output = subprocess.check_output(
(gitlab_rails_bin, 'runner', "user = User.find(1); token = user.personal_access_tokens.create(scopes: [:api], name: 'Root token'); token.set_token('SLurtnxPscPsU-SDm4oN'); token.save!"),
universal_newlines=True)
# Create a new project
print("Gitlab create a project")
path = '/api/v3/projects'
parameter_dict = {'name': 'sample-test', 'namespace': 'open'}
# Token can be set manually
headers = {"PRIVATE-TOKEN" : 'SLurtnxPscPsU-SDm4oN'}
response = requests.post(backend_url + path, params=parameter_dict,
headers=headers, verify=False)
# Check that the project exists
print("Gitlab: check that the project exists")
path = '/api/v3/projects'
response = requests.get(backend_url + path, headers=headers, verify=False)
try:
projects = response.json()
except json.JSONDecodeError:
self.fail("No JSON returned! Maybe your Gitlab URL is incorrect.")
# Only one project exists
self.assertEqual(len(projects), 1)
# The project name is sample-test, which we created above.
self.assertIn("sample-test", projects[0]['name_with_namespace'])
# Get the repo URL; the default one is http://lab.example.com/root/sample-test.git
# We need a URL like http://[2001:67c:1254:e:c4::5041]:7777/root/sample-test
project_1 = projects[0]
repo_url = backend_url.replace("http://", "") + "/" + project_1['path_with_namespace']
# Clone the repo with token
clone_url = 'http://oauth2:' + 'SLurtnxPscPsU-SDm4oN@' + repo_url
repo_path = os.path.join(os.getcwd(), project_1['name'])
print(repo_path)
if os.path.exists(repo_path):
shutil.rmtree(repo_path, ignore_errors=True)
output = subprocess.check_output(('git', 'clone', clone_url), universal_newlines=True)
# Create a new file and push the commit
f = open(os.path.join(repo_path, 'file.txt'), 'x')
f.write('This is the new file.')
f.close()
output = subprocess.check_output(('git', 'add', '.'), cwd=repo_path, universal_newlines=True)
output = subprocess.check_output(('git', 'config', '--global', 'user.name', 'Resilience Test'), cwd=repo_path, universal_newlines=True)
output = subprocess.check_output(('git', 'config', '--global', 'user.email', 'resilience-test@example.com'), cwd=repo_path, universal_newlines=True)
output = subprocess.check_output(('git', 'commit', '-m', 'Initial commit'), cwd=repo_path, universal_newlines=True)
output = subprocess.check_output(('git', 'push', 'origin'), cwd=repo_path, universal_newlines=True)
# Do a fake periodic update
# Compute a backup date in the near future
soon = (datetime.now() + timedelta(minutes=4))
frequency = "%d * * * *" % soon.minute
params = 'backup_frequency=%s' % frequency
# Update Gitlab parameters
print('Requesting Gitlab with parameters %s' % params)
self.checkSlapos('request', 'test_instance', self._test_software_url, '--parameters', params)
self.checkSlapos('node', 'instance')
self.callSlapos('node', 'restart', 'all')
# Wait until after the programmed backup date, and a bit more
t = (soon - datetime.now()).total_seconds()
time.sleep(t + 240)
self.callSlapos('node', 'status')
os.chdir(self.temp_clone_dir)
repo_path = os.path.join(os.getcwd(), project_1['name'])
print(repo_path)
if os.path.exists(repo_path):
shutil.rmtree(repo_path, ignore_errors=True)
output = subprocess.check_output(('git', 'clone', clone_url), universal_newlines=True)
# Check that the file we committed exists and that its content matches.
output = subprocess.check_output(('git', 'show', 'origin/master:file.txt'), cwd=repo_path, universal_newlines=True)
self.assertIn('This is the new file.', output)
def _checkTakeover(self):
super(TestTheiaResilienceGitlab, self)._checkTakeover()
# Get Gitlab parameters
parameter_dict = self._getGitlabConnectionParameters()
backend_url = parameter_dict['backend_url']
# The temp dir created in theia0 should still exist and contain the repo
os.chdir(self.temp_dir)
# Check that the project exists
print("Gitlab: check that the project exists")
path = '/api/v3/projects'
headers = {"PRIVATE-TOKEN" : 'SLurtnxPscPsU-SDm4oN'}
response = requests.get(backend_url + path, headers=headers, verify=False)
try:
projects = response.json()
except json.JSONDecodeError:
self.fail("No JSON returned! Maybe your Gitlab URL is incorrect.")
# Only one project exists
self.assertEqual(len(projects), 1)
# The project name is sample-test, which we created above.
self.assertIn("sample-test", projects[0]['name_with_namespace'])
project_1 = projects[0]
repo_url = backend_url.replace("http://", "") + "/" + project_1['path_with_namespace']
clone_url = 'http://oauth2:' + 'SLurtnxPscPsU-SDm4oN@' + repo_url
repo_path = os.path.join(os.getcwd(), project_1['name'])
# Check that the file we committed in the original theia exists and that its content matches.
output = subprocess.check_output(('git', 'show', 'origin/master:file.txt'), cwd=repo_path, universal_newlines=True)
self.assertIn('This is the new file.', output)
def _getGitlabPartition(self, servicename):
p = subprocess.Popen(
(self._getSlapos(), 'node', 'status'),
stdout=subprocess.PIPE, universal_newlines=True)
out, _ = p.communicate()
found = set()
for line in out.splitlines():
if servicename in line:
found.add(line.split(':')[0])
if not found:
raise Exception("Gitlab %s partition not found" % servicename)
elif len(found) > 1:
raise Exception("Found several partitions for Gitlab %s" % servicename)
return found.pop()
def _getGitlabPartitionPath(self, instance_type, servicename, *paths):
partition = self._getGitlabPartition(servicename)
return self.getPartitionPath(
instance_type, 'srv', 'runner', 'instance', partition, *paths)
......@@ -84,11 +84,18 @@ class ResilientTheiaTestCase(ResilientTheiaMixin, TheiaTestCase):
@classmethod
def _deployEmbeddedSoftware(cls, software_url, instance_name, retries=0, instance_type='export'):
cls.callSlapos('supply', software_url, 'slaprunner', instance_type=instance_type)
for _ in range(retries):
try:
cls.captureSlapos('node', 'software', instance_type=instance_type, stderr=subprocess.STDOUT)
except subprocess.CalledProcessError as e:
print(e.output)
raise
output = cls.captureSlapos('node', 'software', instance_type=instance_type, stderr=subprocess.STDOUT)
except subprocess.CalledProcessError:
continue
print(output)
break
else:
if retries:
print("Wait before running slapos node software one last time")
time.sleep(120)
cls.checkSlapos('node', 'software', instance_type=instance_type)
cls.callSlapos('request', instance_name, software_url, instance_type=instance_type)
cls._processEmbeddedInstance(retries, instance_type)
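The deploy helper now relies on Python's for/else: each attempt that raises CalledProcessError is swallowed with continue, a successful attempt breaks out, and the else branch (which only runs when the loop never hits break) waits and makes one last attempt whose failure is allowed to propagate. The same retry pattern, reduced to a sketch with a hypothetical flaky_build callable:

import time

def build_with_retries(flaky_build, retries=3):
    for _ in range(retries):
        try:
            output = flaky_build()
        except RuntimeError:
            continue          # this attempt failed, try the next one
        print(output)
        break                 # success: the else clause below is skipped
    else:
        # Reached only when no attempt succeeded (or retries == 0).
        if retries:
            print("Wait before running the build one last time")
            time.sleep(120)
        flaky_build()         # final attempt; let any exception propagate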
......