Commit ff8de1c6 authored by Alain Takoudjou

Update release candidates

parents b6748928 a49deb04
......@@ -28,7 +28,7 @@
##############################################################################
# XXX-Cedric : This is an import of
# http://git.erp5.org/gitweb/erp5.git/blob/HEAD:/product/ERP5Type/tests/ERP5TypeFunctionalTestCase.py
# https://lab.nexedi.com/nexedi/erp5/blob/HEAD/product/ERP5Type/tests/ERP5TypeFunctionalTestCase.py
# Modification of the present file should be ported back to this original file.
import os
import time
......
......@@ -39,7 +39,7 @@
"mariadb-url": {
"description": "Relational database access information",
"type": "string"
}
},
"jupyter-url": {
"description": "Jupyter notebook web UI access information",
"type": "string",
......
......@@ -6,7 +6,7 @@
#
# TODO better autogenerate from ^^^ (?)
#
# (last updated for omnibus-gitlab 8.2.3+ce.0-0-g8eda093)
# (last updated for omnibus-gitlab 8.4.2+ce.0-3-g68d5ee8)
[gitlab-parameters]
configuration.external_url = http://lab.example.com
......@@ -45,8 +45,6 @@ configuration.default_projects_features.issues = true
configuration.default_projects_features.merge_requests = true
configuration.default_projects_features.wiki = true
configuration.default_projects_features.snippets = true
# NOTE can be public|private|internal
configuration.default_projects_features.visibility_level= public
#configuration.default_projects_features.builds = false
configuration.webhook_timeout = 10
......@@ -72,12 +70,12 @@ configuration.unicorn_worker_processes = 2
# unicorn advanced
configuration.unicorn_backlog_socket = 1024
configuration.unicorn_worker_memory_limit_min = 200*(1024**2)
configuration.unicorn_worker_memory_limit_max = 250*(1024**2)
configuration.unicorn_worker_memory_limit_min = 300*(1024**2)
configuration.unicorn_worker_memory_limit_max = 350*(1024**2)
# nginx
configuration.nginx_client_max_body_size = 250m
configuration.nginx_client_max_body_size = 0
# NOTE: we don't really need old ciphers - usually we talk directly to frontend only
configuration.nginx_ssl_ciphers = ECDHE-RSA-AES256-GCM-SHA384:ECDHE-RSA-AES128-GCM-SHA256:ECDHE-RSA-AES256-SHA384:ECDHE-RSA-AES128-SHA256:ECDHE-RSA-AES256-SHA:ECDHE-RSA-AES128-SHA:ECDHE-RSA-DES-CBC3-SHA:AES256-GCM-SHA384:AES128-GCM-SHA256:AES256-SHA256:AES128-SHA256:AES256-SHA:AES128-SHA:DES-CBC3-SHA:!aNULL:!eNULL:!EXPORT:!DES:!MD5:!PSK:!RC4
......
......@@ -15,11 +15,21 @@ die() {
# https://gitlab.com/gitlab-org/omnibus-gitlab/blob/master/files/gitlab-cookbooks/gitlab/recipes/postgresql.rb
# initial db setup
pgtables="$({{ psql_bin }} \
-h {{ pgsql['pgdata-directory'] }} \
-U {{ pgsql.superuser }} \
-d {{ pgsql.dbname }} \
-c '\d')" || die "pg query problem"
# ( first query PG several times, waiting a bit until postgresql is started and ready )
tpgwait=5
while true; do
pgtables="$({{ psql_bin }} \
-h {{ pgsql['pgdata-directory'] }} \
-U {{ pgsql.superuser }} \
-d {{ pgsql.dbname }} \
-c '\d')" && break
tpgwait=$(( $tpgwait - 1 ))
test $tpgwait = 0 && die "pg query problem"
echo "I: PostgreSQL is not ready (yet ?); will retry $tpgwait times..." 1>&2
sleep 1
done
echo "I: PostgreSQL ready." 1>&2
if echo "$pgtables" | grep -q '^No relations found' ; then
$RAKE db:schema:load db:seed_fu || die "initial db setup failed"
......
......@@ -118,6 +118,10 @@ var = ${directory:var}/gitlab
tmp = ${:var}/tmp
uploads = ${:var}/uploads
assets = ${:var}/assets
shared = ${:var}/shared
artifacts = ${:shared}/artifacts
lfs-objects = ${:shared}/lfs-objects
builds = ${:var}/builds
backup = ${directory:var}/backup
[gitlab-repo-dir]
......@@ -139,6 +143,10 @@ var = ${gitlab-dir:var}
tmp = ${gitlab-dir:tmp}
uploads = ${gitlab-dir:uploads}
assets = ${gitlab-dir:assets}
shared = ${gitlab-dir:shared}
artifacts = ${gitlab-dir:artifacts}
lfs-objects = ${gitlab-dir:lfs-objects}
builds = ${gitlab-dir:builds}
backup = ${gitlab-dir:backup}
repositories = ${gitlab-repo-xdir:repositories}
......@@ -241,7 +249,6 @@ context-extra =
section nginx nginx
section gitlab_work gitlab-work
section gitlab_workhorse gitlab-workhorse
section unicorn unicorn
[rack_attack.rb]
<= gitlab-etc-template
......@@ -358,14 +365,16 @@ update-command =
<= work-base
software = {{ gitlab_repository_location }}
tune-command =
# secret* config.ru tmp/ log/
# secret* config.ru tmp/ log/ shared/ builds/
rm -f .secret &&
rm -f config.ru &&
rm -rf log tmp &&
rm -rf log tmp shared builds &&
ln -sf ${secrets:secrets}/gitlab_rails_secret .secret &&
ln -sf ${config.ru:rendered} config.ru &&
ln -sf ${gitlab:log} log &&
ln -sf ${gitlab:tmp} tmp &&
ln -sf ${gitlab:shared} shared &&
ln -sf ${gitlab:builds} builds &&
# config/
cd config &&
ln -sf ${unicorn.rb:rendered} unicorn.rb &&
......@@ -515,6 +524,7 @@ command-line = {{ gitlab_workhorse }}
-listenNetwork unix
-listenAddr ${gitlab-workhorse:socket}
-authSocket ${unicorn:socket}
-documentRoot ${gitlab-work:location}/public
# NOTE for profiling
# -pprofListenAddr ...
......@@ -532,10 +542,7 @@ depend =
[promise-gitlab-workhorse]
<= promise-byurl
# gitlab-workhorse works on repositories. Here we only check that it accepts and
# serves requests, so the request is a non-existent URL and the expected code is 403
url = --unix-socket ${gitlab-workhorse:socket} http:/non-existent
http_code = 403
url = --unix-socket ${gitlab-workhorse:socket} http:/static.css
# gitlab-workhorse logs to stdout/stderr - logs are handled by slapos not us
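Judging from the option-style value, promise-byurl hands this url string to curl, so the new check is roughly equivalent to the sketch below (the socket path is a placeholder for the instance-specific ${gitlab-workhorse:socket}; /static.css is served thanks to the -documentRoot option added to gitlab-workhorse above):

    # rough equivalent of the workhorse promise (placeholder socket path)
    curl --unix-socket /srv/slapgrid/slappartN/var/run/gitlab-workhorse.socket http:/static.css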
......@@ -625,7 +632,7 @@ log = ${sidekiq-dir:log}
# NOTE see queue list here:
# https://gitlab.com/gitlab-org/gitlab-ce/blob/master/Procfile
# https://gitlab.com/gitlab-org/omnibus-gitlab/blob/master/files/gitlab-cookbooks/gitlab/templates/default/sv-sidekiq-run.erb
# (last updated for ominbus-gitlab 8.2.3+ce.0-0-g8eda093)
# (last updated for omnibus-gitlab 8.4.4+ce.0-0-g1680742)
[service-sidekiq]
recipe = slapos.cookbook:wrapper
wrapper-path = ${directory:service}/sidekiq
......@@ -638,6 +645,7 @@ command-line =
${gitlab-sidekiq:wrapper-path}
# XXX -q runner ? (present in gitlab-ce/Procfile but not in omnibus)
# XXX -q pages -q elasticsearch ? (present in omnibus but not in gitlab-ce -- those features are gitlab-ee only)
# XXX -P ? (pidfile)
-e production
-r ${gitlab-work:location}
......@@ -646,7 +654,7 @@ command-line =
-L ${sidekiq:log}/sidekiq.log
-q post_receive
-q mailer
-q mailers
-q archive_repo
-q system_hook
-q project_web_hook
......@@ -729,6 +737,8 @@ depend =
[promise-nginx]
<= promise-byurl
# XXX this depends on gitlab-workhorse being up
# (nginx is configured to proxy all requests to gitlab-workhorse)
url = ${backend-info:url}/static.css
[logrotate-entry-nginx]
......
......@@ -109,25 +109,25 @@ git-executable = ${git:location}/bin/git
<= git-repository
#repository = https://gitlab.com/gitlab-org/gitlab-ce.git
repository = https://lab.nexedi.com/kirr/gitlab-ce.git
# 8.2.X + NXD patches:
revision = v8.2.3-9-g79c127e6e068a619c53a8c22f1db8c1e28ec87d2
# 8.4.X + NXD patches:
revision = v8.4.4-18-g32fa1180aca0b5c119f9a42ecfc733db1fad1dc0
location = ${buildout:parts-directory}/gitlab
[gitlab-shell-repository]
<= git-repository
repository = https://gitlab.com/gitlab-org/gitlab-shell.git
# gitlab 8.2 wants gitlab-shell 2.6.8
# 2.6.8 + NXD patches
revision = v2.6.8-2-g216d7e15fe06917198891a895f762ba84fdcc4d4
# gitlab 8.4 wants gitlab-shell 2.6.10
# 2.6.10
revision = v2.6.10-0-g82b3a4e8f70692ec679d880628fdb0f5844d42b9
location = ${buildout:parts-directory}/gitlab-shell
[gitlab-workhorse-repository]
<= git-repository
#repository = https://gitlab.com/gitlab-org/gitlab-workhorse.git
repository = https://lab.nexedi.com/kirr/gitlab-workhorse.git
# 0.4.X + NXD patches for blob download speedup
# 0.6.X + NXD patches for blob download speedup
# (https://gitlab.com/gitlab-org/gitlab-workhorse/merge_requests/17)
revision = 0.4.1-23-g2beb8c9539433f072e3db540f91f75894ca6b1b0
revision = 0.6.1-2-ga23a5e18486b0de6e3435711dc555c8bfe08fde2
location = ${buildout:parts-directory}/gitlab-workhorse
......@@ -219,35 +219,35 @@ url = ${:_profile_base_location_}/template/${:_buildout_section_name_}
[config.ru.in]
<= download-template
md5sum = bb12852c28079f40a0751f7f3559e2a6
md5sum = 3ed3c439ac1b93f75121dabcea126078
[database.yml.in]
<= download-template
md5sum = ee656cfd96e1c82df167f68bb5773291
md5sum = b33f4f2f49a5a3e3e6542357c555a3a3
[gitconfig.in]
<= download-template
md5sum = f4cb11e8bca379e016b062d0db859b74
md5sum = 75f620ea0751fc8d2dc717cf929d29f3
[gitlab-parameters.cfg]
<= download-file
md5sum = bc98ec10209bc53f6a49888b1a2b9382
md5sum = 2cfd3bbf9da10627044ca3a9a149fdbb
[gitlab-shell-config.yml.in]
<= download-template
md5sum = ea351e16b47f0008f61211eb2d7685e2
md5sum = f061d529b71241d58affbf7aec5c8af1
[gitlab-unicorn-startup.in]
<= download-file
md5sum = 2716afaa9445c0c429c6b211356ebe8f
md5sum = 14c5632182d830c03f7788c85d6f4da1
[gitlab.yml.in]
<= download-template
md5sum = cc32f5053dd2a2461aa5952a5b925310
md5sum = cd7aaeeb1917fdedb7656943065c0a9c
[instance-gitlab.cfg.in]
<= download-file
md5sum = dfd2b14f846eda999fe9d12108d513b4
md5sum = 5f0b7d51e921e0e37e40fb65552dde9c
[macrolib.cfg.in]
<= download-file
......@@ -255,27 +255,27 @@ md5sum = a56a44e96f65f5ed20211bb6a54279f4
[nginx-gitlab-http.conf.in]
<= download-template
md5sum = 590da2b00cd198c7bc261c3d893bc199
md5sum = 3b494fe8425a12e4a7fd3a9bb17f88f8
[nginx.conf.in]
<= download-template
md5sum = f1a6e2bce3f28a2243fed49d1e1601df
md5sum = dc16257d49d1fc1ae6e7d10865898201
[rack_attack.rb.in]
<= download-template
md5sum = 16503c029159ea6db7d0fb5ab67093a3
md5sum = fbea569a1ac9ee46e37d0b98b5441169
[resque.yml.in]
<= download-template
md5sum = 7d9cba658f9315cd058dfc74db943a66
md5sum = 2cd97d9f5906d06e00774dd2e4e6af0e
[smtp_settings.rb.in]
<= download-template
md5sum = c7c09c241b5fa8163e4995260be52604
md5sum = 75b9e0325737ca5ecbf938443a5d3321
[unicorn.rb.in]
<= download-template
md5sum = 9bdca16362fe19c727bca38383e57068
md5sum = 1b55105a3de1ef13260ac3faa30d6e85
[versions]
......
......@@ -2,7 +2,7 @@
# see:
# https://gitlab.com/gitlab-org/gitlab-ce/blob/master/config.ru
# https://gitlab.com/gitlab-org/omnibus-gitlab/blob/master/files/gitlab-cookbooks/gitlab/templates/default/gitlab-rails-config.ru.erb
# (last updated for omnibus-gitlab 8.2.3+ce.0-0-g8eda093)
# (last updated for omnibus-gitlab 8.4.4+ce.0-0-g1680742)
# This file is used by Rack-based servers to start the application.
......
......@@ -2,7 +2,7 @@
# see:
# https://gitlab.com/gitlab-org/gitlab-ce/blob/master/config/database.yml.postgresql
# https://gitlab.com/gitlab-org/omnibus-gitlab/blob/master/files/gitlab-cookbooks/gitlab/templates/default/database.yml.erb
# (last updated for 8.2.3+ce.0-0-g8eda093)
# (last updated for 8.4.4+ce.0-0-g1680742)
{% from 'macrolib.cfg.in' import cfg with context %}
......
......@@ -3,7 +3,7 @@
# see:
# https://gitlab.com/gitlab-org/omnibus-gitlab/blob/master/files/gitlab-cookbooks/gitlab/attributes/default.rb
# https://gitlab.com/gitlab-org/omnibus-gitlab/blob/master/files/gitlab-cookbooks/gitlab/templates/default/gitconfig.erb
# (last updated for omnibus-gitlab 8.2.3+ce.0-0-g8eda093)
# (last updated for omnibus-gitlab 8.4.4+ce.0-0-g1680742)
#
{% from 'macrolib.cfg.in' import cfg with context %}
......
......@@ -2,7 +2,7 @@
# see:
# https://gitlab.com/gitlab-org/gitlab-shell/blob/master/config.yml.example
# https://gitlab.com/gitlab-org/omnibus-gitlab/blob/master/files/gitlab-cookbooks/gitlab/templates/default/gitlab-shell-config.yml.erb
# (last updated for omnibus-gitlab 8.2.3+ce.0-0-g8eda093)
# (last updated for omnibus-gitlab 8.4.4+ce.0-0-g1680742)
# GitLab user. git by default
user: {{ backend_info.user }}
......
......@@ -2,7 +2,7 @@
# see:
# https://gitlab.com/gitlab-org/gitlab-ce/blob/master/config/gitlab.yml.example
# https://gitlab.com/gitlab-org/omnibus-gitlab/blob/master/files/gitlab-cookbooks/gitlab/templates/default/gitlab.yml.erb
# (last updated for omnibus-gitlab 8.2.3+ce.0-0-g8eda093)
# (last updated for omnibus-gitlab 8.4.4+ce.0-0-g1680742)
{% from 'macrolib.cfg.in' import cfg, cfg_https, external_url with context %}
......@@ -50,19 +50,14 @@ production: &base
default_can_create_group: {{ cfg('default_can_create_group') }} # default: true
username_changing_enabled: {{ cfg('username_changing_enabled') }} # default: true - User can change her username/namespace
## Default theme
## BASIC = 1
## MARS = 2
## MODERN = 3
## GRAY = 4
## COLOR = 5
## 1 - Graphite
## 2 - Charcoal
## 3 - Green
## 4 - Gray
## 5 - Violet
## 6 - Blue
default_theme: {{ cfg('default_theme') }} # default: 2
{# we do not need to restrict visibility levels
# Restrict setting visibility levels for non-admin users.
# The default is to allow all levels.
restricted_visibility_levels: <%= @gitlab_restricted_visibility_levels unless @gitlab_restricted_visibility_levels.nil? %>
#}
{# for now we are ok with default issue-closing pattern
## Automatic issue closing
# If a commit message matches this regular expression, all issues referenced from the matched text will be closed.
......@@ -78,7 +73,6 @@ production: &base
merge_requests: {{ cfg('default_projects_features.merge_requests') }}
wiki: {{ cfg('default_projects_features.wiki') }}
snippets: {{ cfg('default_projects_features.snippets') }}
visibility_level: '{{ cfg("default_projects_features.visibility_level") }}' # can be "private" | "internal" | "public"
builds: false {# builds not supported yet <%= @gitlab_default_projects_features_builds %> #}
## Webhook settings
......@@ -139,6 +133,26 @@ production: &base
storage_path: <%= @lfs_storage_path %>
#}
{# we do not support Pages
## GitLab Pages (EE only)
pages:
enabled: <%= @pages_enabled %>
path: <%= @pages_path %>
host: <%= @pages_host %>
port: <%= @pages_port %>
https: <%= @pages_https %>
#}
{# we do not support Elasticsearch
## Elasticsearch (EE only)
# Enable it if you are going to use elasticsearch instead of
# regular database search
elasticsearch:
enabled: <%= @elasticsearch_enabled %>
host: <%= @elasticsearch_host %>
port: <%= @elasticsearch_port %>
#}
## Gravatar
## For Libravatar see: http://doc.gitlab.com/ce/customization/libravatar.html
gravatar:
......@@ -149,6 +163,33 @@ production: &base
#}
{# XXX cron jobs are disabled for now - we do not support CI and EE features
## Auxiliary jobs
# Periodically executed jobs, to self-heal GitLab, do external synchronizations, etc.
# Please read here for more information: https://github.com/ondrejbartas/sidekiq-cron#adding-cron-job
cron_jobs:
# Flag stuck CI builds as failed
stuck_ci_builds_worker:
cron: <%= @stuck_ci_builds_worker_cron %>
##
# GitLab EE only jobs:
# Snapshot active users statistics
historical_data_worker:
cron: <%= @historical_data_worker_cron %>
# Update mirrored repositories
update_all_mirrors_worker:
cron: <%= @update_all_mirrors_worker_cron %>
# In addition to refreshing users when they log in,
# periodically refresh LDAP users membership.
# NOTE: This will only take effect if LDAP is enabled
ldap_sync_worker:
cron: <%= @ldap_sync_worker_cron %>
#}
#
# 2. GitLab CI settings
# ==========================
......@@ -272,7 +313,7 @@ production: &base
<% end %>
#}
{# default ($RAILS_ROOT/shared/) is just ok
{# default ($RAILS_ROOT/shared/) is ok - we symlinked it to proper place
# Shared file storage settings
shared:
path: <%= @shared_path %>
......@@ -330,6 +371,10 @@ production: &base
ssh_port: <%= @gitlab_shell_ssh_port %>
#}
# git-annex support (EE only)
# If this setting is set to true, the same setting in config.yml of
# gitlab-shell needs to be set to true
git_annex_enabled: <%= @git_annex_enabled %>
## Git settings
# CAUTION!
......
{{ autogenerated }}
# see:
# https://gitlab.com/gitlab-org/omnibus-gitlab/blob/master/files/gitlab-cookbooks/gitlab/templates/default/nginx-gitlab-http.conf.erb
# (last updated for omnibus-gitlab 8.2.3+ce.0-0-g8eda093)
# (last updated for omnibus-gitlab 8.4.4+ce.0-0-g1680742)
{% from 'macrolib.cfg.in' import cfg, cfg_bool, cfg_https, fqdn with context %}
......@@ -33,10 +33,6 @@
## configuration ##
###################################
upstream gitlab {
server unix:{{ unicorn.socket }} fail_timeout=0;
}
upstream gitlab-workhorse {
server unix:{{ gitlab_workhorse.socket }};
}
......@@ -109,12 +105,6 @@ server {
error_log {{ nginx.log }}/gitlab_error.log;
location / {
## Serve static files from defined root folder.
## @gitlab is a named location for the upstream fallback, see below.
try_files $uri /index.html $uri.html @gitlab;
}
location /uploads/ {
## If you use HTTPS make sure you disable gzip compression
## to be safe against BREACH attack.
{{ 'gzip off;' if cfg_https else ''}}
......@@ -125,105 +115,7 @@ server {
proxy_connect_timeout {{ cfg('nginx_proxy_connect_timeout') }};
proxy_redirect off;
proxy_set_header Host $http_host;
proxy_set_header X-Real-IP $remote_addr;
{% if cfg_https %}
proxy_set_header X-Forwarded-Ssl on;
{% endif %}
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header X-Forwarded-Proto {{ "https" if cfg_https else "http" }};
proxy_set_header X-Frame-Options SAMEORIGIN;
proxy_pass http://gitlab;
}
## If a file which is not found in the root folder is requested,
## then the proxy passes the request to the upstream (gitlab unicorn).
location @gitlab {
## If you use HTTPS make sure you disable gzip compression
## to be safe against BREACH attack.
{{ 'gzip off;' if cfg_https else ''}}
## https://github.com/gitlabhq/gitlabhq/issues/694
## Some requests take more than 30 seconds.
proxy_read_timeout {{ cfg('nginx_proxy_read_timeout') }};
proxy_connect_timeout {{ cfg('nginx_proxy_connect_timeout') }};
proxy_redirect off;
proxy_set_header Host $http_host;
proxy_set_header X-Real-IP $remote_addr;
{% if cfg_https %}
proxy_set_header X-Forwarded-Ssl on;
{% endif %}
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header X-Forwarded-Proto {{ "https" if cfg_https else "http" }};
proxy_set_header X-Frame-Options SAMEORIGIN;
proxy_pass http://gitlab;
}
location ~ ^/[\w\.-]+/[\w\.-]+/gitlab-lfs/objects {
client_max_body_size 0;
# 'Error' 418 is a hack to re-use the @gitlab-workhorse block
error_page 418 = @gitlab-workhorse;
return 418;
}
location ~ ^/[\w\.-]+/[\w\.-]+/(info/refs|git-upload-pack|git-receive-pack)$ {
client_max_body_size 0;
# 'Error' 418 is a hack to re-use the @gitlab-workhorse block
error_page 418 = @gitlab-workhorse;
return 418;
}
location ~ ^/[\w\.-]+/[\w\.-]+/repository/archive {
client_max_body_size 0;
# 'Error' 418 is a hack to re-use the @gitlab-workhorse block
error_page 418 = @gitlab-workhorse;
return 418;
}
location ~ ^/api/v3/projects/.*/repository/archive {
client_max_body_size 0;
# 'Error' 418 is a hack to re-use the @gitlab-workhorse block
error_page 418 = @gitlab-workhorse;
return 418;
}
# Build artifacts should be submitted to this location
location ~ ^/[\w\.-]+/[\w\.-]+/builds/download {
client_max_body_size 0;
# 'Error' 418 is a hack to re-use the @gitlab-workhorse block
error_page 418 = @gitlab-workhorse;
return 418;
}
# Build artifacts should be submitted to this location
location ~ /ci/api/v1/builds/[0-9]+/artifacts {
client_max_body_size 0;
# 'Error' 418 is a hack to re-use the @gitlab-workhorse block
error_page 418 = @gitlab-workhorse;
return 418;
}
# access to raw blobs -> @gitlab-workhorse
location ~ ^/[\w\.-]+/[\w\.-]+/raw/ {
client_max_body_size 0;
error_page 418 = @gitlab-workhorse;
return 418;
}
location @gitlab-workhorse {
client_max_body_size 0;
## If you use HTTPS make sure you disable gzip compression
## to be safe against BREACH attack.
{{ 'gzip off;' if cfg_https else ''}}
## https://github.com/gitlabhq/gitlabhq/issues/694
## Some requests take more than 30 seconds.
proxy_read_timeout {{ cfg('nginx_proxy_read_timeout') }};
proxy_connect_timeout {{ cfg('nginx_proxy_connect_timeout') }};
proxy_redirect off;
proxy_http_version 1.1;
proxy_set_header Host $http_host;
proxy_set_header X-Real-IP $remote_addr;
......@@ -236,21 +128,6 @@ server {
proxy_pass http://gitlab-workhorse;
}
## Enable gzip compression as per rails guide:
## http://guides.rubyonrails.org/asset_pipeline.html#gzip-compression
## WARNING: If you are using relative urls remove the block below
## See config/application.rb under "Relative url support" for the list of
## other files that need to be changed for relative url support
location ~ ^/(assets)/ {
root {{ gitlab_work.location }}/public;
gzip_static on; # to serve pre-gzipped version
expires max;
add_header Cache-Control public;
}
error_page 502 /502.html;
{# we don't support custom nginx configs
<%= @custom_gitlab_server_config %>
#}
......
......@@ -2,7 +2,7 @@
# see:
# https://gitlab.com/gitlab-org/gitlab-ce/blob/master/lib/support/nginx/gitlab-ssl
# https://gitlab.com/gitlab-org/omnibus-gitlab/blob/master/files/gitlab-cookbooks/gitlab/templates/default/nginx.conf.erb
# (last updated for omnibus-gitlab 8.2.3+ce.0-0-g8eda093)
# (last updated for omnibus-gitlab 8.4.4+ce.0-0-g1680742)
{% from 'macrolib.cfg.in' import cfg with context %}
......@@ -43,8 +43,9 @@ http {
include {{ nginx_gitlab_http_conf }};
{# we don't need: ci, mattermost
{# we don't need: ci, pages, mattermost
include <%= @gitlab_ci_http_config %>
include <%= @gitlab_pages_http_config %>;
include <%= @gitlab_mattermost_http_config %>
#}
}
......@@ -2,7 +2,7 @@
# see:
# https://gitlab.com/gitlab-org/gitlab-ce/blob/master/config/initializers/rack_attack.rb.example
# https://gitlab.com/gitlab-org/omnibus-gitlab/blob/master/files/gitlab-cookbooks/gitlab/templates/default/rack_attack.rb.erb
# (last updated for omnibus-gitlab 8.2.3+ce.0-0-g8eda093)
# (last updated for omnibus-gitlab 8.4.4+ce.0-0-g1680742)
{% from 'macrolib.cfg.in' import cfg with context %}
......
......@@ -2,6 +2,6 @@
# see:
# https://gitlab.com/gitlab-org/gitlab-ce/blob/master/config/resque.yml.example
# https://gitlab.com/gitlab-org/omnibus-gitlab/blob/master/files/gitlab-cookbooks/gitlab/templates/default/resque.yml.erb
# (last updated for omnibus-gitlab 8.2.3+ce.0-0-g8eda093)
# (last updated for omnibus-gitlab 8.4.4+ce.0-0-g1680742)
production: unix://{{ redis.unixsocket }}
......@@ -2,7 +2,7 @@
# see:
# https://gitlab.com/gitlab-org/gitlab-ce/blob/master/config/initializers/smtp_settings.rb.sample
# https://gitlab.com/gitlab-org/omnibus-gitlab/blob/master/files/gitlab-cookbooks/gitlab/templates/default/smtp_settings.rb.erb
# (last updated for omnibus-gitlab 8.2.3+ce.0-0-g8eda093)
# (last updated for omnibus-gitlab 8.4.4+ce.0-0-g1680742)
{% from 'macrolib.cfg.in' import cfg, cfg_bool with context %}
......
......@@ -3,7 +3,7 @@
# https://gitlab.com/gitlab-org/gitlab-ce/blob/master/config/unicorn.rb.example
# https://gitlab.com/gitlab-org/gitlab-ce/blob/master/config/unicorn.rb.example.development
# https://gitlab.com/gitlab-org/omnibus-gitlab/blob/master/files/gitlab-cookbooks/gitlab/templates/default/unicorn.rb.erb
# (last updated for omnibus-gitlab 8.2.3+ce.0-0-g8eda093)
# (last updated for omnibus-gitlab 8.4.4+ce.0-0-g1680742)
{% from 'macrolib.cfg.in' import cfg with context %}
......
......@@ -43,7 +43,7 @@ md5sum = d7d4a7e19d55bf14007819258bf42100
[erp5-kernel]
<= download-file-base
filename = ERP5kernel.py.jinja
md5sum = 3dfc6a7c16828bff55dec4cf96b730d3
md5sum = eb07520fc189dcac2d3bd2db1368b1ba
[kernel-json]
<= download-file-base
......
......@@ -265,7 +265,25 @@ class ERP5Kernel(Kernel):
mime_type = 'text/plain'
try:
content = json.loads(self.response)
# Example format for the json result we are expecting:
# content = {
# "status": "ok",
# "ename": null,
# "evalue": null,
# "traceback": null,
# "code_result": "",
# "mime_type": "text/plain"
# }
# So, we can easily use any of the keys to update values as needed.
# Get code_result for successful execution of the code
code_result = content['code_result']
# Update mime_type with the mime_type from the http response result
# Required in case the mime_type is anything other than 'text/plain'
mime_type = content['mime_type']
# Display the error message to the frontend when the content status is 'error'
if content['status']=='error':
reply_content = {
......
......@@ -36,7 +36,7 @@ git-executable = ${git:location}/bin/git
[erp5.util-repository]
recipe = slapos.recipe.build:gitclone
repository = http://git.erp5.org/repos/erp5.git
repository = https://lab.nexedi.com/nexedi/erp5.git
branch = master
git-executable = ${git:location}/bin/git
......
# NayuOS
This is a SlapOS recipe to build NayuOS. It needs to be put in the <code>/srv/slapgrid/\<part\>/srv/runner/project/slapos/software/</code> directory. The created directory is called <code>\<nayuos_build_dirname\></code> in this documentation.
## License
GPL v2 or later
## Requirements
* sudo and git on the host (for now)
* slapuser with sudo rights to execute the cros_sdk scripts (needed to access the chroot environment provided by Chromium OS)
in /etc/sudoers:
<pre><code>
\<slapuser\> ALL= NOPASSWD: /srv/slapgrid/\<part\>/srv/runner/instance/\<inst_part\>/parts/clone-depot-tools/cros_sdk, /srv/slapgrid/\<part\>/srv/runner/instance/\<inst_part\>/parts/chromiumos/\<release\>/chromite/bin/cros_sdk, /srv/slapgrid/\<part\>/srv/runner/instance/\<inst_part\>/parts/chromiumos/\<release\>/chromite/bootstrap/cros_sdk, /srv/slapgrid/\<part\>/srv/runner/instance/\<inst_part\>/parts/chromiumos/\<release\>/chromite/scripts/cros_sdk.py, /srv/slapgrid/\<part\>/srv/runner/instance/\<inst_part\>/parts/chromiumos/\<release\>/chromite/scripts/cros_sdk.pyc, /srv/slapgrid/\<part\>/srv/runner/instance/\<inst_part\>/parts/chromiumos/\<release\>/chromium/tools/depot_tools/cros_sdk
</code></pre>
## Input
In the vifib parameters (softinst\<nb\>.host.vifib.net \> Services \> Parameters):
* board / ex: peppy, swanky, ... (choosing daisy will accept all licenses for the daisy board build only, in order to use Mali drivers, see [chromium mailing list](https://groups.google.com/a/chromium.org/forum/#!topic/chromium-os-dev/Pf9ZG2itxWM))
* branch / ex: release-R46-7390.B
* keep_cache / yes|no (choosing "no" saves about 15 GB of disk space per board; choosing "yes" makes the next build faster and cheaper in terms of resources because nothing is rebuilt from scratch); see the example request below
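The same parameters can also be passed when requesting the instance from a configured slapos client. A minimal sketch, assuming the slapos CLI is set up and using the configuration keys read by instance.cfg (boards, branch, keep_cache); the instance reference and software release URL below are hypothetical placeholders:
<pre><code>
# hypothetical request; adjust the reference and software URL to your setup
slapos request nayuos https://example.com/nayuos/software.cfg \
  --parameters boards=peppy branch=release-R46-7390.B keep_cache=no
</code></pre>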
## Output
The image will be produced in:
<code>/srv/slapgrid/\<part\>/srv/runner/instance/\<inst_part\>/parts/chromiumos/images/</code>
and the logs are in:
<code>/srv/slapgrid/\<part\>/srv/runner/instance/\<inst_part\>/var/log/cros_sources_dl.log</code> and <code>/srv/slapgrid/\<part\>/srv/runner/instance/\<inst_part\>/var/log/cros_build.log</code>
The script that downloads the sources and builds the image is located in
<code>/srv/slapgrid/\<part\>/srv/runner/instance/\<inst_part\>/etc/run</code>
## External documents
* [ <code>repo</code> command reference ](https://source.android.com/source/using-repo.html)
* [ NayuOS official website ](https://www.nayuos.org)
* [ crouton for chroot ](https://github.com/dnschneid/crouton) ([warning about verified boot](https://github.com/dnschneid/crouton/blob/2a1fc9da380650f47e2bcf37d00962bfb68c4830/installer/main.sh#L517-L536))
## Notes for possible improvements
* [ Running virtual machines on your chromebook ](https://www.chromium.org/chromium-os/developer-information-for-chrome-os-devices/running-virtual-machines-on-your-chromebook)
* to get a more common User-Agent (the one of ChromiumOS/NayuOS is quite rare and identifies the user, see [studies of the EFF](https://panopticlick.eff.org/static/browser-uniqueness.pdf)), it seems possible to change the User-Agent flag for guest mode in the getOffTheRecord function by adding a line (key "kUserAgent", value: some common user agent string). Then rebuild Chromium and [add it to NayuOS](https://www.chromium.org/chromium-os/developer-guide#TOC-Making-changes-to-the-Chromium-web-).
[buildout]
parts =
parameters
template-full-build-script
# eggs given by software.cfg
eggs-directory = {{ eggs_directory }}
develop-eggs-directory = {{ develop_eggs_directory }}
#################################### Tools, sources getter ##################################
[depot-tools]
# needed for the "repo" command (to download many git repositories)
recipe = slapos.recipe.build:gitclone
repository = https://chromium.googlesource.com/chromium/tools/depot_tools.git
branch = master
[use-depot-tools]
export-path =
export PATH=${depot-tools:location}:"$PATH";
[nayuos-ebuilds]
recipe = slapos.recipe.build:gitclone
repository = https://lab.nexedi.com/nexedi/nayuos-ebuilds.git
############################################################################################
[parameters]
# for use of the "parameters" XML configuration given from the
# webrunner interface
# if needed, .serialized => json (more complex structures are OK, but need another
# jinja template for reading the content of the parameters)
recipe = slapos.cookbook:slapconfiguration
computer = ${slap_connection:computer_id}
partition = ${slap_connection:partition_id}
url = ${slap_connection:server_url}
key = ${slap_connection:key_file}
cert = ${slap_connection:cert_file}
[directory]
recipe = slapos.cookbook:mkdirectory
log = ${buildout:directory}/var/log
run = ${buildout:directory}/etc/run
cros_location = ${buildout:directory}/parts/chromiumos
ebuilds_dir = ${nayuos-ebuilds:location}
scripts_dir = {{ scripts_dir }}
logo_dir = {{ logo_dir }}
[template-full-build-script]
# create the instance.cfg file in the buildout directory
# (use jinja for templating)
recipe = slapos.recipe.template:jinja2
template = {{ scripts_dir }}/cros_full_build.in
rendered = ${directory:run}/cros_full_build
md5sum = 76942f32e682b36397b41db28297625d
mode = 0770
context =
# for access to the eggs from the instance
key instance_log_dir directory:log
key cros_location directory:cros_location
key depot_tools_export_path_cmd use-depot-tools:export-path
key branch parameters:configuration.branch
key boards_list parameters:configuration.boards
key keep_cache parameters:configuration.keep_cache
key ebuilds_dir directory:ebuilds_dir
key scripts_dir directory:scripts_dir
key logo_dir directory:logo_dir
raw nayu_dev_packages net-libs/nodejs net-misc/re6stnet dev-vcs/git dev-python/flask dev-python/virtualenv sys-fs/cryptsetup
\ No newline at end of file
#!/bin/bash
#XXX: should not depend on the host's bash ^
######################## Download of sources using the "repo" command ########################
DL_LOG={{ instance_log_dir }}/cros_sources_dl.log
BRANCH={{ branch }}
CHROMIUM_OVERLAY={{ cros_location }}/{{ branch }}/src/third_party/chromiumos-overlay
echo "getting Chromium OS sources..." >> $DL_LOG
{{ depot_tools_export_path_cmd }}
echo "{{ cros_location }}/{{ branch }}"
install -d "{{ cros_location }}/{{ branch }}"
cd "{{ cros_location }}/{{ branch }}"
repo init -u https://chromium.googlesource.com/chromiumos/manifest.git -b {{ branch }} >> $DL_LOG
# in case changes were already made, stash changes to be able to pull
cd ${CHROMIUM_OVERLAY}
git stash
repo sync >> $DL_LOG
############################## Prepare chroot environment ###################################
BUILD_LOG={{ instance_log_dir }}/cros_build.log
TEST_LOG={{ instance_log_dir }}/tests_nayuos_image.log
{{ depot_tools_export_path_cmd }}
cd {{ cros_location }}/{{ branch }}
# create chroot environment (exit on failure)
cros_sdk --download || { echo "Problem entering chroot or downloading chroot environment. Exiting." ; exit 1 ; }
# compile Python with sqlite support (-> change USE flag) for dev-lang/python
# it is needed by re6st
# just need to change -sqlite by sqlite on the right line
# same for IPv6 support
PACKAGE_USE_FILE=${CHROMIUM_OVERLAY}/profiles/targets/chromeos/package.use
line_number=$(sed -n '/dev-lang\/python/=' ${PACKAGE_USE_FILE})
if [ $line_number ] ; then
echo $line_number
sed -i "${line_number}s/-sqlite/sqlite/" ${PACKAGE_USE_FILE}
if [[ ! $(sed -n ${line_number}p ${PACKAGE_USE_FILE} | grep ipv6) ]]; then
sed -i "${line_number}s/$/ ipv6/" ${PACKAGE_USE_FILE}
fi
fi
# add some ebuilds, specific to NayuOS
for category in $( ls {{ ebuilds_dir }} ); do
echo ${category}
cp -R {{ ebuilds_dir }}/${category}/* ${CHROMIUM_OVERLAY}/${category}/
done
install -m 770 "{{ scripts_dir }}/test_nayuos_image" "{{ cros_location }}/{{ branch }}/src/scripts/test_nayuos_image"
### packages management ###
BASE_CHROMEOS_DEV_ROOT_EBUILD=${CHROMIUM_OVERLAY}/chromeos-base/chromeos-dev-root/chromeos-dev-root-0.0.1.ebuild
VIRTUAL_CHROMEOS_OS_DEV_EBUILD=${CHROMIUM_OVERLAY}/virtual/target-chromium-os-dev/target-chromium-os-dev-1.ebuild
# change the virtual ebuild responsible for installing all packages to add the ones
# needed for NayuOS
for package in {{ nayu_dev_packages }} ; do
echo $package
if [[ $( grep "${package}" "${VIRTUAL_CHROMEOS_OS_DEV_EBUILD}" ) ]] ; then
echo "no need to change ${VIRTUAL_CHROMEOS_OS_DEV_EBUILD} file to add ${package}..." >> "${BUILD_LOG}"
else
printf "\n\nRDEPEND=\"\${RDEPEND}\n ${package}\"\n">> ${VIRTUAL_CHROMEOS_OS_DEV_EBUILD}
fi
done
# do not install the Upstart init script that starts ssh daemon at boot time
rm ${CHROMIUM_OVERLAY}/chromeos-base/chromeos-sshd-init/files/openssh-server.conf
sed -i -n '/src_install/q;p' ${CHROMIUM_OVERLAY}/chromeos-base/openssh-server-init/openssh-server-init-0.0.1.ebuild
sed -i -n '/src_install/q;p' ${CHROMIUM_OVERLAY}/chromeos-base/chromeos-sshd-init/chromeos-sshd-init-0.0.1.ebuild
# bashrc modifications
BASH_EBUILD_DIR={{ cros_location }}/{{ branch }}/src/third_party/portage-stable/app-shells/bash
if [[ $(grep "git --exec-path" "${BASH_EBUILD_DIR}/files/dot-bashrc") ]] ; then
echo "git alias already set"
else
echo "# git quickfix for finding right git executables
if [ -d /usr/local/libexec/git-core/ ] ; then
alias git='git --exec-path=/usr/local/libexec/git-core/'
fi
# git quickfix for finding 'less' and using it as pager
less_path=\$(which less 2>/dev/null)
if [ "\${less_path}" ] ; then
git config --global core.pager \${less_path}
fi" >> ${BASH_EBUILD_DIR}/files/dot-bashrc
fi
# increase the revision number
# XXX: remove hardcoded revision numbers
cd ${CHROMIUM_OVERLAY}/chromeos-base/openssh-server-init/
ln -s openssh-server-init-0.0.1-r24.ebuild openssh-server-init-0.0.1-r25.ebuild
cd ${CHROMIUM_OVERLAY}/chromeos-base/chromeos-sshd-init/
ln -s chromeos-sshd-init-0.0.1-r5.ebuild chromeos-sshd-init-0.0.1-r6.ebuild
cd ${BASH_EBUILD_DIR}
ln -s bash-4.2_p48-r1.ebuild bash-4.2_p48-r2.ebuild
######################################## Build ##############################################
BOARDS="{{ boards_list }}"
KEEP_CACHE="{{ keep_cache }}"
for board in ${BOARDS} ; do
echo ${board}
if [ ${board} == daisy ] ; then
echo "daisy board: accepting license for Mali drivers..."
cros_sdk -- sudo sh -c "cp /etc/make.conf.user /etc/make.conf.user.save"
cros_sdk -- sudo sh -c "echo 'ACCEPT_LICENSE=\"*\"' >> /etc/make.conf.user"
fi
# preparing packages (for chroot and image)
date >> "${BUILD_LOG}"
echo "building packages for a ${board}-flavoured Chromium OS..." >> "${BUILD_LOG}"
cros_sdk -- ./build_packages --board=${board} >> "${BUILD_LOG}"
# change boot pictures
cros_sdk -- cros_workon --board=${board} start chromiumos-assets
cros_sdk -- cros_workon_make --board=${board} chromiumos-assets
cros_sdk -- cros_workon_make --board=${board} chromiumos-assets --test
cros_sdk -- cros_workon_make --board=${board} chromiumos-assets --install
cp {{ logo_dir }}/* {{ cros_location }}/{{ branch }}/src/platform/chromiumos-assets/images_100_percent/
cp {{ logo_dir }}/* {{ cros_location }}/{{ branch }}/src/platform/chromiumos-assets/images_200_percent/
NAYU_IMAGE_LOCATION=${board}.nayuos.img
# rebuild packages with boot pictures
cros_sdk -- ./build_packages --board=${board} >> "${BUILD_LOG}"
# NayuOS images
date >> "${BUILD_LOG}"
echo "building image" >> "${BUILD_LOG}"
cros_sdk -- ./build_image --board=${board} dev >> "${BUILD_LOG}" \
&& cros_sdk -- rm -f $NAYU_IMAGE_LOCATION && cros_sdk -- touch $NAYU_IMAGE_LOCATION \
&& cros_sdk -- cros flash --board=${board} file://$NAYU_IMAGE_LOCATION >> "${BUILD_LOG}" \
&& cros_sdk -- ./test_nayuos_image ${board} > "${TEST_LOG}" \
|| { echo "An error occured while building ${board} NayuOS image. Exiting." ; exit 1 ;}
# save ~15Go/device but delete cache (next build will be as long)
if [ ${KEEP_CACHE,,} == "no" ] ; then
cros_sdk -- sudo rm -R /var/cache/chromeos-chrome/chrome-src/src/out_${board}
fi
if [ ${board} == daisy ]; then
echo "daisy board: removing accepted license for the next builds..."
cros_sdk -- sudo sh -c "mv /etc/make.conf.user.save /etc/make.conf.user"
fi
done
####################################### Post build ##########################################
# keep only the substring between the first two dashes as the current release number
RELEASE=$(echo ${BRANCH} | cut -d- -f2)
DIR_IMAGE_LOCATION={{ cros_location }}/images/${RELEASE}/$(date +'%F')
install ${DIR_IMAGE_LOCATION} -d
mv {{ cros_location }}/{{ branch }}/src/scripts/*.img ${DIR_IMAGE_LOCATION}
cd ${DIR_IMAGE_LOCATION}
for hashfunction in md5sum sha1sum sha256sum sha512sum; do
echo ${hashfunction} >> hashes.txt
${hashfunction} *.img >> hashes.txt
printf "\n\n" >> hashes.txt
done
for file in $(ls *.img); do
gzip -9 ${file}
done
exit 0
#!/bin/bash
FAILURE="FAILURE"
SUCCESS="SUCCESS"
if [ -z "$1" ] ; then
echo "Missing board argument. Exiting."
exit 1
fi
BOARD=$1
MOUNTPOINT="/tmp/${BOARD}"
ORIGINAL_GRANDENET_SCRIPT=~/trunk/src/third_party/chromiumos-overlay/net-misc/re6stnet/files/grandenet
GRANDENET_SCRIPT="usr/local/bin/grandenet"
INIT_SSH_SERVER="etc/init/openssh-server.conf"
BASHRC="etc/skel/.bashrc"
EXPECTED_ALIAS="alias git='git --exec-path=/usr/local/libexec/git-core/'"
VIRTUALENV_BIN="usr/local/bin/virtualenv"
HAS_FAILED=0
function print_result() {
test_result=$1
message=$2
printf "\t${test_result}: "
printf "${message}\n"
if [[ ${test_result} == ${FAILURE} ]] ; then
HAS_FAILED=1
fi
}
# MOUNT IMAGE AND GET INFO
install -d ${MOUNTPOINT}
./mount_gpt_image.sh --safe -f $( ./get_latest_image.sh --board=${BOARD} ) -r ${MOUNTPOINT}
echo $(ls "${MOUNTPOINT}/usr/local")
if [[ $(ls "${MOUNTPOINT}/usr/local") ]] ; then
my_diff=$(diff ${ORIGINAL_GRANDENET_SCRIPT} "${MOUNTPOINT}/${GRANDENET_SCRIPT}")
opensshd_config=$(ls "${MOUNTPOINT}/${INIT_SSH_SERVER}")
gitalias=$(grep "${EXPECTED_ALIAS}" "${MOUNTPOINT}/${BASHRC}")
virtualenvbin=$(ls "${MOUNTPOINT}/${VIRTUALENV_BIN}")
else
is_empty=1
fi
./mount_gpt_image.sh --safe -f $( ./get_latest_image.sh --board=${BOARD} ) -r ${MOUNTPOINT} -u
rmdir ${MOUNTPOINT}
# PRINT RESULTS
echo "* test if /usr/local exists"
if [[ ${is_empty} == 1 ]] ; then
print_result ${FAILURE} "/usr/local is empty."
else
print_result ${SUCCESS} "/usr/local is not empty."
echo "* test grandenet script existence and content"
if [[ ${no_grandenet_script} == 1 ]] ; then
print_result ${FAILURE} "grandenet script is missing (no file at ${GRANDENET_SCRIPT})."
elif [[ ${my_diff} != "" ]] ; then
print_result ${FAILURE} "grandenet scripts differs:\n${my_diff}"
else
print_result ${SUCCESS} "${GRANDENET_SCRIPT} exists and contains what is expected."
fi
echo "* test openssh server init script absence"
if [[ ${opensshd_config} ]] ; then
print_result ${FAILURE} "opensshd config exists: ${opensshd_config}"
else
print_result ${SUCCESS} "opensshd config removed."
fi
echo "* test git quick fix for option --exec-path"
if [[ ${gitalias} == "" ]] ; then
print_result ${FAILURE} "Expected alias for git command not in ${MOUNTPOINT}/${BASHRC}. Should be: ${EXPECTED_ALIAS}"
else
print_result ${SUCCESS} "git alias is correct."
fi
echo "* test if virtualenv binary exists"
if [[ ${virtualenvbin} ]] ; then
print_result ${SUCCESS} "virtualenv binary exists."
else
print_result ${FAILURE} "Expected virtualenv binary not in ${MOUNTPOINT}/${VIRTUALENV_BIN}."
fi
fi
exit ${HAS_FAILED}
[buildout]
extends =
# basic needs
../../stack/slapos.cfg
parts +=
# use stack/slapos.cfg
slapos-cookbook
verify-sudo-exists-on-host
template-instance
install-eggs-for-the-instance
#add eggs needed by the instance
find-links +=
https://pypi.python.org/packages/source/s/slapos.recipe.build/slapos.recipe.build-0.21.tar.gz
https://pypi.python.org/packages/source/p/plone.recipe.command/plone.recipe.command-1.1.zip
versions = versions
[versions]
plone.recipe.command = 1.1
slapos.recipe.template = 2.8
slapos.recipe.build = 0.21
[verify-sudo-exists-on-host]
recipe = plone.recipe.command
stop-on-error = true
# run the same command when installing and on updates
update-command = ${:command}
command = sudo -V
[template-instance]
# create the instance.cfg file in the buildout directory
# (use jinja for templating)
recipe = slapos.recipe.template:jinja2
template = ${:_profile_base_location_}/instance.cfg
rendered = ${buildout:directory}/instance.cfg
md5sum = d5a12b1ea814b4bb6792dfebaa655f90
mode = 0644
scripts_dir = ${:_profile_base_location_}/scripts
logo_dir = ${:_profile_base_location_}/logo
context =
# for access to the eggs from the instance
key eggs_directory buildout:eggs-directory
key develop_eggs_directory buildout:develop-eggs-directory
key scripts_dir :scripts_dir
key logo_dir :logo_dir
[install-eggs-for-the-instance]
# after installation of the eggs, the recipes
# will be available (added to sys.path)
recipe = zc.recipe.egg
eggs = slapos.recipe.build
plone.recipe.command
\ No newline at end of file
LoadModule unixd_module modules/mod_unixd.so
LoadModule access_compat_module modules/mod_access_compat.so
LoadModule authz_core_module modules/mod_authz_core.so
LoadModule authz_host_module modules/mod_authz_host.so
LoadModule log_config_module modules/mod_log_config.so
LoadModule setenvif_module modules/mod_setenvif.so
LoadModule version_module modules/mod_version.so
LoadModule proxy_module modules/mod_proxy.so
LoadModule proxy_http_module modules/mod_proxy_http.so
LoadModule socache_shmcb_module modules/mod_socache_shmcb.so
LoadModule ssl_module modules/mod_ssl.so
LoadModule mime_module modules/mod_mime.so
LoadModule dav_module modules/mod_dav.so
LoadModule dav_fs_module modules/mod_dav_fs.so
LoadModule negotiation_module modules/mod_negotiation.so
LoadModule rewrite_module modules/mod_rewrite.so
LoadModule headers_module modules/mod_headers.so
LoadModule deflate_module modules/mod_deflate.so
LoadModule filter_module modules/mod_filter.so
AddOutputFilterByType DEFLATE text/cache-manifest text/html text/plain text/css application/hal+json application/json application/x-javascript text/xml application/xml application/rss+xml text/javascript image/svg+xml
PidFile "{{ parameter_dict['pid-file'] }}"
ServerAdmin admin@
TypesConfig conf/mime.types
AddType application/x-compress .Z
AddType application/x-gzip .gz .tgz
ServerTokens Prod
ServerSignature Off
TraceEnable Off
TimeOut {{ parameter_dict['timeout'] }}
SSLCertificateFile {{ parameter_dict['cert'] }}
SSLCertificateKeyFile {{ parameter_dict['key'] }}
SSLRandomSeed startup builtin
SSLRandomSeed connect builtin
SSLProtocol All -SSLv2
#SSLHonorCipherOrder on
{% if parameter_dict['cipher'] -%}
SSLCipherSuite {{ parameter_dict['cipher'] }}
{%- endif %}
SSLSessionCache shmcb:{{ parameter_dict['ssl-session-cache'] }}(512000)
SSLProxyEngine On
# As the backend trusts the REMOTE_USER header, always unset it
RequestHeader unset REMOTE_USER
{% if parameter_dict['ca-cert'] -%}
SSLVerifyClient require
RequestHeader set REMOTE_USER %{SSL_CLIENT_S_DN_CN}s
SSLCACertificateFile {{ parameter_dict['ca-cert'] }}
SSLCARevocationCheck chain
SSLCARevocationFile {{ parameter_dict['crl'] }}
{%- endif %}
ErrorLog "{{ parameter_dict['error-log'] }}"
# Default apache log format with request time in microsecond at the end
LogFormat "%h %l %u %t \"%r\" %>s %b \"%{Referer}i\" \"%{User-Agent}i\" %D" combined
CustomLog "{{ parameter_dict['access-log'] }}" combined
<Directory />
Options FollowSymLinks
AllowOverride None
Allow from all
</Directory>
RewriteEngine On
{% for family_name, (port, _, backend, enable_authentication) in parameter_dict['backend-list'].items() -%}
{% for ip in parameter_dict['ip-list'] -%}
Listen {{ ip }}:{{ port }}
{% endfor -%}
<VirtualHost *:{{ port }}>
{% if enable_authentication -%}
SSLVerifyClient require
RequestHeader set REMOTE_USER %{SSL_CLIENT_S_DN_CN}s
SSLCACertificateFile {{ parameter_dict['shared-ca-cert'] }}
SSLCARevocationPath {{ parameter_dict['shared-crl'] }}
LogFormat "%h %l %{REMOTE_USER}i %t \"%r\" %>s %b \"%{Referer}i\" \"%{User-Agent}i\" %D" combined
# We would like to separate the authenticated logs.
ErrorLog "{{ parameter_dict['log-dir'] }}/apache-{{ family_name }}-error.log"
CustomLog "{{ parameter_dict['log-dir'] }}/apache-{{ family_name }}-access.log" combined
{% endif -%}
SSLEngine on
RewriteRule ^/(.*) {{ backend }}/$1 [L,P]
</VirtualHost>
{% endfor -%}
{% set part_list = [] -%}
{% set ssl_parameter_dict = slapparameter_dict.get('ssl', {}) %}
{% macro section(name) %}{% do part_list.append(name) %}{{ name }}{% endmacro -%}
{% set use_ipv6 = slapparameter_dict.get('use-ipv6', False) -%}
{% set shared_ca_path = slapparameter_dict['shared-certificate-authority-path'] -%}
{#
XXX: This template only supports exactly one IPv4 and (if ipv6 is used) one IPv6
per partition. No more (undefined result), no less (IndexError).
-#}
# TODO: insert varnish between apache & haproxy.
# And think of a way to specify which urls go through varnish and which go
# directly to haproxy (maybe just by passing a literal configuration file chunk).
{% set ipv4 = (ipv4_set | list)[0] -%}
{% set apache_ip_list = [ipv4] -%}
{% if ipv6_set -%}
{% set ipv6 = (ipv6_set | list)[0] -%}
{% do apache_ip_list.append('[' ~ ipv6 ~ ']') -%}
{% endif -%}
[simplefile]
recipe = slapos.recipe.template:jinja2
template = inline:{{ '{{ content }}' }}
{% macro simplefile(section_name, file_path, content, mode='') -%}
{% set content_section_name = section_name ~ '-content' -%}
[{{ content_section_name }}]
content = {{ dumps(content) }}
[{{ section(section_name) }}]
< = simplefile
rendered = {{ file_path }}
context = key content {{content_section_name}}:content
mode = {{ mode }}
{%- endmacro %}
{% if use_ipv6 -%}
[zope-tunnel-base]
recipe = slapos.cookbook:ipv4toipv6
runner-path = ${directory:services}/${:base-name}
6tunnel-path = {{ parameter_dict['6tunnel'] }}/bin/6tunnel
shell-path = {{ parameter_dict['dash'] }}/bin/dash
ipv4 = {{ ipv4 }}
{% endif -%}
{% set haproxy_dict = {} -%}
{% set apache_dict = {} -%}
{% set next_port = slapparameter_dict['tcpv4-port'] -%}
{% for family_name, parameter_id_list in sorted(
slapparameter_dict['zope-family-dict'].iteritems()) -%}
{% set zope_family_address_list = [] -%}
{% set has_webdav = [] -%}
{% for parameter_id in parameter_id_list -%}
{% set zope_address_list = slapparameter_dict[parameter_id] -%}
{% for zope_address, maxconn, webdav in zope_address_list -%}
{% if webdav -%}
{% do has_webdav.append(None) %}
{% endif -%}
{% if use_ipv6 -%}
[{{ section('zope-tunnel-' ~ next_port) }}]
< = zope-tunnel-base
base-name = {{ 'zeo-tunnel-' ~ next_port }}
ipv4-port = {{ next_port }}
ipv6-port = {{ zope_address.split(']:')[1] }}
ipv6 = {{ zope_address.split(']:')[0][1:] }}
{% set zope_effective_address = ipv4 ~ ":" ~ next_port -%}
{% set next_port = next_port + 1 -%}
{% else -%}
{% set zope_effective_address = zope_address -%}
{% endif -%}
{% do zope_family_address_list.append((zope_effective_address, maxconn, webdav)) -%}
{% endfor -%}
{% endfor -%}
{# Make rendering fail artificially if any family has no known backend.
# This is useful as haproxy's hot-reconfiguration mechanism is
# supervisord-incompatible.
# As jinja2 postpones KeyError until place-holder value is actually used,
# do a no-op getitem.
-#}
{% do zope_family_address_list[0][0] -%}
{% set haproxy_port = next_port -%}
{% set next_port = next_port + 1 -%}
{% do haproxy_dict.__setitem__(family_name, (haproxy_port, zope_family_address_list)) -%}
{% if has_webdav -%}
{% set internal_scheme = 'http' -%}{# mod_rewrite does not recognise webdav scheme -#}
{% set external_scheme = 'webdavs' -%}
{% else %}
{% set internal_scheme = 'http' -%}
{% set external_scheme = 'https' -%}
{% endif -%}
{% set backend_path = slapparameter_dict['backend-path-dict'][family_name] -%}
{% set ssl_authentication = slapparameter_dict['ssl-authentication-dict'][family_name] -%}
{% do apache_dict.__setitem__(family_name, (next_port, external_scheme, internal_scheme ~ '://' ~ ipv4 ~ ':' ~ haproxy_port ~ backend_path, ssl_authentication)) -%}
{% set next_port = next_port + 1 -%}
{% endfor -%}
[haproxy-cfg-parameter-dict]
socket-path = ${directory:run}/haproxy.sock
server-check-path = {{ dumps(slapparameter_dict['haproxy-server-check-path']) }}
backend-dict = {{ dumps(haproxy_dict) }}
ip = {{ ipv4 }}
[haproxy-cfg]
recipe = slapos.recipe.template:jinja2
template = {{ parameter_dict['template-haproxy-cfg'] }}
rendered = ${directory:etc}/haproxy.cfg
context = section parameter_dict haproxy-cfg-parameter-dict
extensions = jinja2.ext.do
[{{ section('haproxy') }}]
recipe = slapos.cookbook:wrapper
wrapper-path = ${directory:services}/haproxy
command-line = "{{ parameter_dict['haproxy'] }}/sbin/haproxy" -f "${haproxy-cfg:rendered}"
{# TODO: build socat and wrap it as "${directory:bin}/haproxy-ctl" to connect to "${haproxy-cfg-parameter-dict:socket-path}" #}
[apache-conf-ssl]
cert = ${directory:apache-conf}/apache.crt
key = ${directory:apache-conf}/apache.pem
ca-cert = ${directory:apache-conf}/ca.crt
crl = ${directory:apache-conf}/crl.pem
[apache-conf-parameter-dict]
backend-list = {{ dumps(apache_dict) }}
ip-list = {{ dumps(apache_ip_list) }}
pid-file = ${directory:run}/apache.pid
error-log = ${directory:log}/apache-error.log
access-log = ${directory:log}/apache-access.log
log-dir = ${directory:log}
# Apache 2.4's default value (60 seconds) can be a bit too short
timeout = 300
# Basic SSL server configuration
cert = ${apache-ssl:cert}
key = ${apache-ssl:key}
cipher =
ssl-session-cache = ${directory:log}/apache-ssl-session-cache
# Client x509 auth
ca-cert = ${apache-ssl-client:cert}
crl = ${apache-ssl-client:crl}
{% if shared_ca_path -%}
shared-ca-cert = {{ shared_ca_path }}/cacert.pem
shared-crl = {{ shared_ca_path }}/crl
{%- endif %}
[apache-conf]
recipe = slapos.recipe.template:jinja2
template = {{ parameter_dict['template-apache-conf'] }}
rendered = ${directory:apache-conf}/apache.conf
context = section parameter_dict apache-conf-parameter-dict
[{{ section('apache') }}]
recipe = slapos.cookbook:wrapper
wrapper-path = ${directory:services}/apache
command-line = "{{ parameter_dict['apache'] }}/bin/httpd" -f "${apache-conf:rendered}" -DFOREGROUND
[{{ section('apache-promise') }}]
# Check any apache port in ipv4, expect other ports and ipv6 to behave consistently
recipe = slapos.cookbook:check_port_listening
path = ${directory:promise}/apache
hostname = {{ ipv4 }}
port = {{ apache_dict.values()[0][0] }}
[publish]
recipe = slapos.cookbook:publish.serialised
{% for family_name, (apache_port, scheme, _, _) in apache_dict.items() -%}
{{ family_name ~ '-v6' }} = {% if ipv6_set %}{{ scheme ~ '://[' ~ ipv6 ~ ']:' ~ apache_port }}{% endif %}
{{ family_name }} = {{ scheme ~ '://' ~ ipv4 ~ ':' ~ apache_port }}
{% endfor -%}
[apache-ssl]
{% if ssl_parameter_dict.get('key') -%}
key = ${apache-ssl-key:rendered}
cert = ${apache-ssl-cert:rendered}
{{ simplefile('apache-ssl-key', '${apache-conf-ssl:key}', ssl_parameter_dict['key']) }}
{{ simplefile('apache-ssl-cert', '${apache-conf-ssl:cert}', ssl_parameter_dict['cert']) }}
{% else %}
recipe = plone.recipe.command
command = "{{ parameter_dict['openssl'] }}/bin/openssl" req -newkey rsa -batch -new -x509 -days 3650 -nodes -keyout "${:key}" -out "${:cert}"
key = ${apache-conf-ssl:key}
cert = ${apache-conf-ssl:cert}
{%- endif %}
[apache-ssl-client]
{% if ssl_parameter_dict.get('ca-cert') -%}
cert = ${apache-ssl-ca:rendered}
crl = ${apache-ssl-crl:rendered}
{{ simplefile('apache-ssl-ca', '${apache-conf-ssl:ca-cert}', ssl_parameter_dict['ca-cert']) }}
{{ simplefile('apache-ssl-crl', '${apache-conf-ssl:crl}', ssl_parameter_dict['crl']) }}
{% else %}
cert =
crl =
{%- endif %}
{% set apache_service_log_list = {} -%}
{% for family_name, (_, _, _, authentication) in apache_dict.items() -%}
{% if authentication -%}
{% set base_name = 'apache-' ~ family_name -%}
{% do part_list.append('logrotate-' ~ base_name) -%}
{% do apache_service_log_list.__setitem__(family_name, base_name) -%}
[logrotate-{{ base_name }}]
recipe = slapos.cookbook:logrotate.d
logrotate-entries = ${logrotate:logrotate-entries}
backup = ${logrotate:backup}
name = {{ base_name }}
log = ${apache-conf-parameter-dict:log-dir}/{{ base_name }}-error.log ${apache-conf-parameter-dict:log-dir}/{{ base_name }}-access.log
post = {{ parameter_dict['bin-directory'] }}/slapos-kill --pidfile ${apache-conf-parameter-dict:pid-file} -s USR1
{% endif -%}
{% endfor -%}
[logrotate-apache]
recipe = slapos.cookbook:logrotate.d
logrotate-entries = ${logrotate:logrotate-entries}
backup = ${logrotate:backup}
name = apache
log = ${apache-conf-parameter-dict:error-log} ${apache-conf-parameter-dict:access-log}
post = {{ parameter_dict['bin-directory'] }}/slapos-kill --pidfile ${apache-conf-parameter-dict:pid-file} -s USR1
[directory]
recipe = slapos.cookbook:mkdirectory
apache-conf = ${:etc}/apache
bin = ${buildout:directory}/bin
etc = ${buildout:directory}/etc
promise = ${directory:etc}/promise
services = ${:etc}/run
var = ${buildout:directory}/var
run = ${:var}/run
log = ${:var}/log
ca-dir = ${buildout:directory}/srv/ssl
requests = ${:ca-dir}/requests
private = ${:ca-dir}/private
certs = ${:ca-dir}/certs
newcerts = ${:ca-dir}/newcerts
crl = ${:ca-dir}/crl
[monitor-instance-parameter]
monitor-httpd-ipv6 = {{ (ipv6_set | list)[0] }}
monitor-httpd-port = {{ next_port }}
monitor-title = Balancer monitor
[buildout]
extends =
{{ logrotate_cfg }}
{{ parameter_dict['template-monitor'] }}
parts +=
publish
logrotate-apache
{{ part_list | join('\n ') }}
[directory]
recipe = slapos.cookbook:mkdirectory
etc = ${buildout:directory}/etc
services = ${:etc}/run
promise = ${:etc}/promise
[erp5-bootstrap]
recipe = slapos.cookbook:erp5.bootstrap
runner-path = ${directory:services}/erp5-bootstrap
{# Note: a random domain name will be picked if several point to the same IP -#}
{% set reverse_hosts = {} -%}
{% for x, y in publish['hosts-dict'].iteritems() -%}
{% do reverse_hosts.__setitem__(y, x) -%}
{% endfor -%}
{# XXX: Expect the first database to be the one to use for catalog. -#}
{% set mysql_parsed = urlparse.urlparse(publish['mariadb-database-list'][0]) -%}
mysql-url = {{ dumps(urlparse.urlunparse(mysql_parsed[:1] + (mysql_parsed.username + ":" + mysql_parsed.password + "@" + reverse_hosts.get(mysql_parsed.hostname, mysql_parsed.hostname) + ':' ~ mysql_parsed.port, ) + mysql_parsed[2:])) }}
{# Pick the first http[s] family found, they should all be equivalent anyway. -#}
{# Don't pick the http[s] family configured with ssl-authentication=true. By convention, such a family name contains 'service'. -#}
{% set family_list = [] -%}
{% for key, value in publish.items() -%}
{% if key.startswith('family-') and value.startswith('http') and not 'service' in key -%}
{% do family_list.append(value.split('://', 1)) -%}
{% endif -%}
{% endfor -%}
zope-url = {{ dumps(family_list[0][0] + '://' + publish['inituser-login'] + ':' + publish_early['inituser-password'] + '@' + family_list[0][1] + '/' + publish['site-id']) }}
[promise-erp5-site]
recipe = slapos.cookbook:check_url_available
url = ${erp5-bootstrap:zope-url}
path = ${directory:promise}/erp5-site
dash_path = {{ parameter_dict['dash-location'] }}/bin/dash
curl_path = {{ parameter_dict['curl-location'] }}/bin/curl
[buildout]
parts = promise-erp5-site
eggs-directory = {{ eggs_directory }}
develop-eggs-directory = {{ develop_eggs_directory }}
......@@ -23,7 +23,7 @@ repository_id_list = erp5 vifib/master
[erp5]
recipe = slapos.recipe.build:gitclone
repository = http://git.erp5.org/repos/erp5.git
repository = https://lab.nexedi.com/nexedi/erp5.git
branch = erp5-vifib
git-executable = ${git:location}/bin/git
......@@ -54,6 +54,33 @@ dummy +=
extra-paths +=
${vifib:location}/master
### Overwrite recipes to introduce customized changes
[download-base-part]
recipe = slapos.recipe.build:download
url = ${:_profile_base_location_}/${:filename}
mode = 644
[template-erp5]
< = download-base-part
filename = instance-erp5.cfg.in
md5sum = 6b7712c26dfdd105e8a1143d04839651
[template-balancer]
< = download-base-part
filename = instance-balancer.cfg.in
md5sum = 02c258e51ff4619efe258bbf24b9ceed
[template-apache-conf]
< = download-base-part
filename = apache.conf.in
md5sum = 77c9e3cd1e95279761310cd0eeda78b3
[template-create-erp5-site-real]
< = download-base-part
filename = instance-create-erp5-site-real.cfg.in
md5sum = 86a2b244341218cd0c4b6d398c61ee20
[versions]
python-memcached = 1.47
facebook-sdk = 0.4.0
......
......@@ -40,10 +40,6 @@ recipe = plone.recipe.command
command = echo "Updating setup...";cd $${slapos.core:location}; export PATH="$${slapos-test-runner:prepend-path}:$PATH"; export CPPFLAGS="$${environment:CPPFLAGS}"; export LDFLAGS="$${environment:LDFLAGS}"; export PYTHONPATH="$${environment:PYTHONPATH}"; export LOCAL_IPV4="$${environment:LOCAL_IPV4}"; ${python2.7:location}/bin/python setup.py test -n; ${python2.7:location}/bin/python setup.py test -n; ${python2.7:location}/bin/python setup.py test -n; ${python2.7:location}/bin/python setup.py test -n
update-command = $${:command}
[slapos.package]
<= download-source
repository = ${slapos.package-repository:location}
[slapos.cookbook]
<= download-source
repository = ${slapos.cookbook-repository:location}
......@@ -79,7 +75,6 @@ run-test-suite-binary = ${buildout:bin-directory}/runTestSuite
test-list =
$${slapos.cookbook:location}
$${slapos.core:location}
$${slapos.package:location}
$${slapos.recipe.template:location}
$${slapos.recipe.build:location}
$${slapos.recipe.cmmi:location}
......
......@@ -14,7 +14,6 @@ extends =
parts =
slapos.cookbook-repository
slapos.core-repository
slapos.package-repository
slapos.recipe.template-repository
slapos.recipe.build-repository
slapos.recipe.cmmi-repository
......@@ -27,6 +26,7 @@ parts =
recipe = zc.recipe.egg
eggs =
${lxml-python:egg}
Jinja2
erp5.util
slapos.cookbook
collective.recipe.template
......@@ -46,16 +46,12 @@ branch = master
[slapos.cookbook-repository]
<= git-clone-repository
repository = http://git.erp5.org/repos/slapos.git
repository = https://lab.nexedi.com/nexedi/slapos.git
[slapos.core-repository]
<= git-clone-repository
repository = https://lab.nexedi.com/nexedi/slapos.core.git
[slapos.package-repository]
<= git-clone-repository
repository = https://lab.nexedi.com/nexedi/slapos.package.git
[slapos.recipe.template-repository]
<= git-clone-repository
repository = https://lab.nexedi.com/nexedi/slapos.recipe.template.git
......@@ -70,12 +66,12 @@ repository = https://lab.nexedi.com/nexedi/slapos.recipe.cmmi.git
[erp5-util-repository]
<= git-clone-repository
repository = http://git.erp5.org/repos/erp5.git
repository = https://lab.nexedi.com/nexedi/erp5.git
[template]
recipe = slapos.recipe.template
url = ${:_profile_base_location_}/instance.cfg
md5sum = 934df9a6b59fd7cc43195931b3ba4520
md5sum = 19b76f8a9f12be111e3a6600b337c21b
output = ${buildout:directory}/template.cfg
mode = 640
......
......@@ -41,7 +41,7 @@ setup = ${slapos.cookbook-repository:location}
# Used for resiliency tests only
[erp5.util-repository]
recipe = slapos.recipe.build:gitclone
repository = http://git.erp5.org/repos/erp5.git
repository = https://lab.nexedi.com/nexedi/erp5.git
branch = master
git-executable = ${git:location}/bin/git
develop = true
......
......@@ -113,6 +113,6 @@
"instance-name": {
"description": "Name of the instance, to show in the window title",
"type": "string"
},
}
}
}
......@@ -233,7 +233,7 @@ recipe = slapos.recipe.template:jinja2
# XXX: "template.cfg" is hardcoded in instanciation recipe
rendered = ${buildout:directory}/template.cfg
template = ${:_profile_base_location_}/instance.cfg.in
md5sum = 98a4edfb18cfd810ea570f56d502a2cc
md5sum = fdaaac449c92dcd68f0434b244d579f2
mode = 640
context =
key mariadb_link_binary template-mariadb:link-binary
......@@ -406,7 +406,7 @@ jupyter-enable-default = false
[erp5]
recipe = slapos.recipe.build:gitclone
repository = http://git.erp5.org/repos/erp5.git
repository = https://lab.nexedi.com/nexedi/erp5.git
branch = master
git-executable = ${git:location}/bin/git
......@@ -702,7 +702,7 @@ objgraph = 2.0.1
pandas = 0.16.1
ply = 3.8
polib = 1.0.7
pprofile = 1.7.3
pprofile = 1.8
ptyprocess = 0.5
pycountry = 1.19
pyflakes = 1.0.0
......
......@@ -74,10 +74,13 @@ filename = instance-erp5.cfg
extra-context =
key jupyter_enable_default dynamic-template-erp5-parameters:jupyter-enable-default
key local_bt5_repository dynamic-template-erp5-parameters:local-bt5-repository
key openssl_location :openssl-location
import urlparse urlparse
import-list =
rawfile root_common {{ root_common }}
openssl-location = {{ openssl_location }}
[dynamic-template-balancer-parameters]
apache = {{ apache_location }}
openssl = {{ openssl_location }}
......