Commit bfd5cb9f authored by Thomas Gambier

cleanup: remove unused python2 recipes

parent e73f7282
@@ -79,7 +79,6 @@ setup(name=name,
'addresiliency = slapos.recipe.addresiliency:Recipe',
'apacheperl = slapos.recipe.apacheperl:Recipe',
'apachephp = slapos.recipe.apachephp:Recipe',
'apachephpconfigure = slapos.recipe.apachephpconfigure:Recipe',
'apacheproxy = slapos.recipe.apacheproxy:Recipe',
'certificate_authority = slapos.recipe.certificate_authority:Recipe',
'certificate_authority.request = slapos.recipe.certificate_authority:Request',
@@ -108,14 +107,12 @@ setup(name=name,
'generic.memcached = slapos.recipe.generic_memcached:Recipe',
'generic.mysql.wrap_update_mysql = slapos.recipe.generic_mysql:WrapUpdateMySQL',
'gitinit = slapos.recipe.gitinit:Recipe',
'haproxy = slapos.recipe.haproxy:Recipe',
  • @tomo this haproxy is still used in https://lab.nexedi.com/nexedi/slapos/blob/f082ad99e596a8dc5174f56b4d3c46a4f11896a3/software/cloudooo/instance-cloudooo.cfg.in#L185

    In ERP5 we don't use it and just generate a haproxy config file with a jinja template. This haproxy config was not really good for cloudooo (the timeouts were a bit short), so I guess our plan is to change the cloudooo software release to include its own haproxy.cfg.in like we have in stack/erp5, right?

  • In !1281 (merged) we changed cloudooo to use a simple template for haproxy.cfg.
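
For reference, the jinja approach boils down to rendering a plain template over the backend list; a minimal sketch (template text, names and addresses are hypothetical, not the actual stack/erp5 or cloudooo files):

    import jinja2

    # Hypothetical haproxy.cfg fragment rendered from a jinja template,
    # with a longer server timeout than the hardcoded recipe template allowed.
    template = jinja2.Template(
        "listen {{ name }}\n"
        "  bind {{ ip }}:{{ port }}\n"
        "  timeout server 900s\n"
        "{% for address in backends %}"
        "  server {{ name }}_{{ loop.index }} {{ address }} check inter 3s rise 1 fall 2\n"
        "{% endfor %}")

    print(template.render(name='cloudooo', ip='127.0.0.1', port=8011,
                          backends=['127.0.0.1:12004', '127.0.0.1:12005']))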

'ipv4toipv6 = slapos.recipe.6tunnel:FourToSix',
'ipv6toipv4 = slapos.recipe.6tunnel:SixToFour',
'jsondump = slapos.recipe.jsondump:Recipe',
'logrotate = slapos.recipe.logrotate:Recipe',
'logrotate.d = slapos.recipe.logrotate:Part',
'mkdirectory = slapos.recipe.mkdirectory:Recipe',
'mioga.instantiate = slapos.recipe.mioga.instantiate:Recipe',
'nbdserver = slapos.recipe.nbdserver:Recipe',
'neoppod.cluster = slapos.recipe.neoppod:Cluster',
'neoppod.admin = slapos.recipe.neoppod:Admin',
##############################################################################
#
# Copyright (c) 2012 Vifib SARL and Contributors. All Rights Reserved.
#
# WARNING: This program as such is intended to be used by professional
# programmers who take the whole responsibility of assessing all potential
# consequences resulting from its eventual inadequacies and bugs.
# End users who are looking for a ready-to-use solution with commercial
# guarantees and support are strongly advised to contract a Free Software
# Service Company.
#
# This program is Free Software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 3
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
##############################################################################
from __future__ import print_function
from slapos.recipe.librecipe import GenericBaseRecipe
import zc.buildout
import sys
import zc.recipe.egg
class Recipe(GenericBaseRecipe):
  def install(self):
    """
    Taken out from the old "lamp" recipe. Allows to configure a LAMP instance.
    """
    self.path_list = []
    document_root = self.options['htdocs']
    url = self.options.get('url', '')
    mysql_conf = {'mysql_host': self.options['mysql-host'],
                  'mysql_port': self.options['mysql-port'],
                  'mysql_user': self.options['mysql-username'],
                  'mysql_password': self.options['mysql-password'],
                  'mysql_database': self.options['mysql-database'],
                 }
    self.configureInstallation(document_root, url, mysql_conf)
    return self.path_list
  def configureInstallation(self, document_root, url, mysql_conf):
    """Start a process which can launch python scripts, move or remove files or
    directories when installing software.
    """
    if not self.options.has_key('delete') and not self.options.has_key('rename') and not\
        self.options.has_key('chmod') and not self.options.has_key('script') and not self.options.has_key('sql-script'):
      return ""
    delete = []
    chmod = []
    data = []
    rename = []
    rename_list = ""
    argument = [self.options['lampconfigure'], "-H", mysql_conf['mysql_host'], "-P", mysql_conf['mysql_port'],
                "-p", mysql_conf['mysql_password'], "-u", mysql_conf['mysql_user']]
    if not self.options.has_key('file_token'):
      argument = argument + ["-d", mysql_conf['mysql_database'],
                             "--table", self.options['table_name'].strip(), "--cond",
                             self.options.get('constraint', '1').strip()]
    else:
      argument = argument + ["-f", self.options['file_token'].strip()]
    argument += ["-t", document_root]

    if self.options.has_key('delete'):
      delete = ["delete"]
      for fname in self.options['delete'].split(','):
        delete.append(fname.strip())
    if self.options.has_key('rename'):
      for fname in self.options['rename'].split(','):
        if fname.find("=>") < 0:
          old_name = fname
          fname = []
          fname.append(old_name)
          fname.append(old_name + '-' + mysql_conf['mysql_user'])
        else:
          fname = fname.split("=>")
        cmd = ["rename"]
        if self.options.has_key('rename_chmod'):
          cmd += ["--chmod", self.options['rename_chmod'].strip()]
        rename.append(cmd + [fname[0].strip(), fname[1].strip()])
        rename_list += fname[0] + " to " + fname[1] + " "
    if self.options.has_key('chmod'):
      chmod = ["chmod", self.options['mode'].strip()]
      for fname in self.options['chmod'].split(','):
        chmod.append(fname.strip())
    if self.options.has_key('script') and \
        self.options['script'].strip().endswith(".py"):
      data = ["run", self.options['script'].strip(), "-v", mysql_conf['mysql_database'], url, document_root]
    if self.options.has_key('sql-script'):
      data = ["sql", self.options['sql-script'].strip(), "-v", mysql_conf['mysql_database'], url, document_root]

    # TODO factor
    if delete != []:
      print("Creating lampconfigure with 'delete' arguments")
      command = argument + delete
    if rename != []:
      for parameters in rename:
        print("Creating lampconfigure with 'rename' arguments")
        command = argument + rename
    if chmod != []:
      print("Creating lampconfigure with 'chmod' arguments")
      command = argument + chmod
    if data != []:
      print("Creating lampconfigure with 'run' arguments")
      command = argument + data

    configureinstall_wrapper_path = self.createPythonScript(
      self.options['configureinstall-location'],
      __name__ + '.runner.executeRunner',
      (argument, delete, rename, chmod, data)
    )

    #TODO finish to port this and remove upper one
    #configureinstall_wrapper_path = self.createPythonScript(
    #  self.options['configureinstall-location'],
    #  'slapos.lamp.run',
    #  [command]
    #)

    self.path_list.append(configureinstall_wrapper_path)

    return rename_list
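
# Illustration with hypothetical values: given a 'delete' option of "install.php",
# configureInstallation() above assembles something like
#   argument = ['.../bin/lampconfigure', '-H', '127.0.0.1', '-P', '3306',
#               '-p', 'secret', '-u', 'user', '-d', 'ttrss',
#               '--table', 'db.ttrss_users', '--cond', '1', '-t', '.../htdocs']
#   delete   = ['delete', 'install.php']
# and the generated wrapper calls runner.executeRunner(argument, delete, ...),
# which simply runs subprocess.call(argument + delete).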
import subprocess


def executeRunner(arguments, delete, rename, chmod, data):
  """Start the instance configuration. This may run a python script, and move
  and/or rename files or directories, when a condition is fulfilled. The
  condition may be that a file exists or that an entry exists in the database.
  """
  if delete:
    print "Calling lampconfigure with 'delete' arguments"
    subprocess.call(arguments + delete)
  if rename:
    for parameters in rename:
      print "Calling lampconfigure with 'rename' arguments"
      subprocess.call(arguments + parameters)
  if chmod:
    print "Calling lampconfigure with 'chmod' arguments"
    subprocess.call(arguments + chmod)
  if data:
    print "Calling lampconfigure with 'run' arguments"
    print arguments + data
    subprocess.call(arguments + data)
##############################################################################
#
# Copyright (c) 2011 Vifib SARL and Contributors. All Rights Reserved.
#
# WARNING: This program as such is intended to be used by professional
# programmers who take the whole responsibility of assessing all potential
# consequences resulting from its eventual inadequacies and bugs.
# End users who are looking for a ready-to-use solution with commercial
# guarantees and support are strongly advised to contract a Free Software
# Service Company.
#
# This program is Free Software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 3
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
##############################################################################
from slapos.recipe.librecipe import GenericBaseRecipe


class Recipe(GenericBaseRecipe):
  """
  haproxy instance configuration.
  name -- local name of the haproxy
  wrapper-path -- location of the init script to generate
  binary-path -- location of the haproxy command
  ctl-path -- location of the haproxy control script
  conf-path -- location of the configuration file
  socket-path -- location of the socket file for administration
  ip -- ip of the haproxy server
  port -- port of the haproxy server
  server-check-path -- path of the domain to check
  address -- string with the list of all urls to check
    Example: 127.0.0.1:12004 127.0.0.1:12005
  """

  def install(self):
    # inter must be quite short in order to quickly detect an unresponsive node
    # and to quickly detect a node which is back.
    # rise must be the minimum possible: 1. A node which is back does not need
    # to wait any longer and can be given work immediately.
    # fall should be quite short: with inter at 3 and fall at 2, a node will be
    # considered dead after 6 seconds.
    # maxconn should be set to the maximum number of threads we have per zope,
    # so that haproxy manages the queue of requests and can move a request to
    # another node if the initially selected one is dead.
    # maxqueue is the number of waiting requests in the queue of every zope
    # client. It makes sure that a single zope client does not handle all the
    # work while the other clients are doing nothing. This was happening, even
    # though we have round-robin distribution, because when a node dies for a
    # few seconds all requests are dispatched to the other nodes, and then
    # users stick to those nodes and do not come back. Please note this option
    # is not an issue if you have more than (maxqueue * node_quantity)
    # requests, because haproxy will handle a top-level queue.
    try:
      backend_dict = self.options['backend-dict']
    except KeyError:
      backend_list = self.options['backend-list']
      if isinstance(backend_list, str):
        # BBB
        backend_list = backend_list.split()
      backend_dict = {
        self.options['name']: (self.options['port'], backend_list),
      }
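    # Assumed option shapes (illustrative values, not taken from a real profile):
    #   backend-list = 127.0.0.1:12004 127.0.0.1:12005
    #   backend-dict = {'cloudooo': ('8011', ['127.0.0.1:12004', '127.0.0.1:12005'])}
    # i.e. one "listen" section per entry, each dispatching to its address list.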
    server_snippet_filename = self.getTemplateFilename(
      'haproxy-server-snippet.cfg.in')
    listen_snippet_filename = self.getTemplateFilename(
      'haproxy-listen-snippet.cfg.in')

    server_snippet = ""
    ip = self.options['ip']
    server_check_path = self.options.get('server-check-path', None)
    if server_check_path:
      httpchk = 'option httpchk GET %s' % server_check_path
    else:
      httpchk = ''

    # FIXME: maxconn must be provided per-backend, not globally
    maxconn = self.options['maxconn']

    i = 0
    for name, (port, backend_list) in backend_dict.iteritems():
      server_snippet += self.substituteTemplate(
        listen_snippet_filename, {
          'name': name,
          'ip': ip,
          'port': port,
          'httpchk': httpchk,
        })
      for address in backend_list:
        i += 1
        server_snippet += self.substituteTemplate(
          server_snippet_filename, {
            'name': '%s_%s' % (name, i),
            'address': address,
            'cluster_zope_thread_amount': maxconn,
          })

    configuration_path = self.createFile(
      self.options['conf-path'],
      self.substituteTemplate(
        self.getTemplateFilename('haproxy.cfg.in'),
        {'socket_path': self.options['socket-path'],
         'server_text': server_snippet},
      )
    )

    wrapper_path = self.createWrapper(
      self.options['wrapper-path'],
      (self.options['binary-path'].strip(), '-f', configuration_path))

    ctl_path = self.createPythonScript(
      self.options['ctl-path'],
      __name__ + '.haproxy.haproxyctl',
      (self.options['socket-path'],))

    return [configuration_path, wrapper_path, ctl_path]
import socket

try:
  import readline
except ImportError:
  pass


def haproxyctl(socket_path):
  while True:
    try:
      l = raw_input('> ')
    except EOFError:
      print
      break
    if l == 'quit':
      break
    s = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
    s.connect(socket_path)
    s.send('%s\n' % l)
    while True:
      r = s.recv(1024)
      if not r:
        break
      print r
    s.close()
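
# Typical interactive session against the admin socket declared with
# "stats socket ... level admin" in haproxy.cfg (standard HAProxy runtime
# commands; the backend and server names are hypothetical):
#   > show info
#   > show stat
#   > disable server cloudooo/cloudooo_1
#   > quit
# Each line typed at the prompt is sent verbatim to the socket and the raw
# response is printed back.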
listen %(name)s
  bind %(ip)s:%(port)s
  cookie SERVERID insert
  balance roundrobin
  %(httpchk)s
  stats uri /haproxy
  stats realm Global\ statistics
  server %(name)s %(address)s cookie %(name)s check inter 3s rise 1 fall 2 maxqueue 5 maxconn %(cluster_zope_thread_amount)s
global
  maxconn 4096
  stats socket %(socket_path)s level admin

defaults
  mode http
  retries 1
  option redispatch
  maxconn 2000
  # It is useless to have a timeout much bigger than the one of apache.
  # By default apache uses 300s, so we set slightly more in order to
  # make sure that apache will close the connection first.
  timeout server 305s
  # Stop waiting in queue for a zope to become available.
  # If no zope can be reached after one minute, consider the request will
  # never succeed.
  timeout queue 60s
  # The connection should be immediate on LAN, so we should not set more
  # than 5 seconds, and that may already be too much.
  timeout connect 5s
  # As requested in the haproxy doc, make this "at least equal to timeout server".
  timeout client 305s
  # Use "option httpclose" to not preserve client & server persistent connections
  # while handling every incoming request individually, dispatching them one after
  # another to servers, in HTTP close mode. This is really needed when haproxy
  # is configured with maxconn set to 1; without this option browsers are unable
  # to render a page.
  option httpclose

%(server_text)s
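
# Illustration: with name=cloudooo, ip=127.0.0.1, port=8011, maxconn=2,
# server-check-path=/ and two backends (hypothetical values), the
# %(server_text)s placeholder above expands to roughly:
#
#   listen cloudooo
#     bind 127.0.0.1:8011
#     cookie SERVERID insert
#     balance roundrobin
#     option httpchk GET /
#     stats uri /haproxy
#     stats realm Global\ statistics
#     server cloudooo_1 127.0.0.1:12004 cookie cloudooo_1 check inter 3s rise 1 fall 2 maxqueue 5 maxconn 2
#     server cloudooo_2 127.0.0.1:12005 cookie cloudooo_2 check inter 3s rise 1 fall 2 maxqueue 5 maxconn 2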
[buildout]
parts =
postgres-urlparse
# apacheperl-promise
mioga-instance
cron-entry-crawler
sshkeys-dropbear
dropbear-server-add-authorized-key
sshkeys-authority
publish-connection-information
eggs-directory = ${buildout:eggs-directory}
develop-eggs-directory = ${buildout:develop-eggs-directory}
offline = true
[rootdirectory]
recipe = slapos.cookbook:mkdirectory
bin = $${buildout:directory}/bin
etc = $${buildout:directory}/etc
srv = $${buildout:directory}/srv
log = $${buildout:directory}/log
var = $${buildout:directory}/var
buildinst = $${buildout:directory}/buildinst
[basedirectory]
recipe = slapos.cookbook:mkdirectory
services = $${rootdirectory:etc}/run
promises = $${rootdirectory:etc}/promise
htdocs = $${rootdirectory:srv}/htdocs
cronstamps = $${rootdirectory:etc}/cronstamps/
cron-entries = $${rootdirectory:etc}/cron.d/
crontabs = $${rootdirectory:etc}/crontabs/
sshkeys = $${rootdirectory:srv}/sshkeys
ssh = $${rootdirectory:etc}/ssh
[cron-simplelogger]
recipe = slapos.cookbook:simplelogger
wrapper = $${rootdirectory:bin}/cron_simplelogger
log = $${rootdirectory:log}/crond.log
[cron]
recipe = slapos.cookbook:cron
dcrond-binary = ${dcron:location}/sbin/crond
cronstamps = $${basedirectory:cronstamps}
cron-entries = $${basedirectory:cron-entries}
crontabs = $${basedirectory:crontabs}
catcher = $${cron-simplelogger:wrapper}
binary = $${basedirectory:services}/crond
[cron-entry-crawler]
<= cron
recipe = slapos.cookbook:cron.d
name = logrotate
frequency = 50 23 * * *
command = $${mioga-instance:bin_dir}/crawl.sh
[apacheperl-promise]
recipe = slapos.cookbook:check_port_listening
path = $${basedirectory:promises}/httpd_promise
hostname = $${apacheperl-instance:ip}
port = $${apacheperl-instance:port}
[publish-connection-information]
recipe = slapos.cookbook:publish
direct_url = $${mioga-url:direct_url}
ssh_command = ssh $${dropbear-server:host} -p $${dropbear-server:port}
url = $${request-frontend:connection-site_url}
# Request POSTGRES INSTANCE and parse its URL
[request-postgres]
<= slap-connection
recipe = slapos.cookbook:request
name = Postgres
software-url = $${slap-connection:software-release-url}
software-type = postgres
return = url
sla-computer_guid = $${slap-connection:computer-id}
[postgres-urlparse]
recipe = slapos.cookbook:urlparse
url = $${request-postgres:connection-url}
[symlinks]
recipe = cns.recipe.symlink
symlink_target = $${rootdirectory:bin}
symlink_base = ${postgresql:location}/bin
# SSH SERVER
[sshkeys-directory]
recipe = slapos.cookbook:mkdirectory
requests = $${basedirectory:sshkeys}/requests/
keys = $${basedirectory:sshkeys}/keys/
[sshkeys-authority]
recipe = slapos.cookbook:sshkeys_authority
request-directory = $${sshkeys-directory:requests}
keys-directory = $${sshkeys-directory:keys}
wrapper = $${basedirectory:services}/sshkeys_authority
keygen-binary = ${dropbear:location}/bin/dropbearkey
[dropbear-server]
recipe = slapos.cookbook:dropbear
host = $${slap-network-information:global-ipv6}
port = 2222
home = $${basedirectory:ssh}
wrapper = $${rootdirectory:bin}/raw_sshd
shell = /bin/bash
rsa-keyfile = $${basedirectory:ssh}/server_key.rsa
dropbear-binary = ${dropbear:location}/sbin/dropbear
[sshkeys-dropbear]
<= sshkeys-authority
recipe = slapos.cookbook:sshkeys_authority.request
name = dropbear
type = rsa
executable = $${dropbear-server:wrapper}
public-key = $${dropbear-server:rsa-keyfile}.pub
private-key = $${dropbear-server:rsa-keyfile}
wrapper = $${basedirectory:services}/sshd
[dropbear-server-add-authorized-key]
<= dropbear-server
recipe = slapos.cookbook:dropbear.add_authorized_key
key = $${slap-parameter:authorized-key}
[slap-parameter]
# Default value if no ssh key is specified
authorized-key =
# IPv4 AND PORT 80 FRONTEND
[request-frontend]
<= slap-connection
recipe = slapos.cookbook:requestoptional
name = Frontend
# XXX We have hardcoded SR URL here.
software-url = http://git.erp5.org/gitweb/slapos.git/blob_plain/HEAD:/software/apache-frontend/software.cfg
slave = true
config-url = $${mioga-url:direct_url}
return = site_url
# MIOGA INSTANCE
[mioga-instance]
recipe = slapos.cookbook:mioga.instantiate
# mioga_compile_dir = ${template-apacheperl:compile-directory}
# Pity that the following line does not work. Or does it?
# mioga_compile_env = ${mioga:environment}
mioga_location = ${mioga:location}
mioga_buildinst = ${mioga:buildinst}
libxslt_bin = ${libxslt:location}/bin
libxml2_bin = ${libxml2:location}/bin
rsync_bin = ${rsync:location}/bin
var_directory = $${rootdirectory:var}
buildinst_directory = $${rootdirectory:buildinst}
instance_root = $${buildout:directory}
perl_bin = ${perl:location}/bin
postgres_bin = ${postgresql:location}/bin
htdocs = $${apacheperl-instance:htdocs}
db_host = $${postgres-urlparse:host}
db_port = $${postgres-urlparse:port}
db_dbname = $${postgres-urlparse:path}
db_username = $${postgres-urlparse:username}
db_password = $${postgres-urlparse:password}
public_ipv6 = $${slap-network-information:global-ipv6}
public_ipv6_port = 8080
private_ipv4 = $${slap-network-information:local-ipv4}
httpd_binary = ${apache-2.2:location}/bin/httpd
path = $${basedirectory:services}/apacheperl
htdocs = $${basedirectory:htdocs}
httpd_conf = $${rootdirectory:etc}/httpd.conf
pid_file = $${basedirectory:services}/apache.pid
lock_file = $${basedirectory:services}/apache.lock
dav_locks = $${buildout:directory}/var/dav_locks
services_dir = $${basedirectory:services}
error_log = $${rootdirectory:log}/error.log
access_log = $${rootdirectory:log}/access.log
bin_dir = $${rootdirectory:bin}
log_dir = $${rootdirectory:log}
site_perl = ${perl:siteprefix}
[mioga-url]
direct_url = http://[$${slap-network-information:global-ipv6}]:$${mioga-instance:public_ipv6_port}
\ No newline at end of file
[buildout]
parts =
symlinks
publish
postgres-instance
postgres-promise
# Define egg directories to be the one from Software Release
# (/opt/slapgrid/...)
eggs-directory = ${buildout:eggs-directory}
develop-eggs-directory = ${buildout:develop-eggs-directory}
offline = true
[instance-parameters]
# Fetches parameters defined in SlapOS Master for this instance
recipe = slapos.cookbook:slapconfiguration
computer = $${slap-connection:computer-id}
partition = $${slap-connection:partition-id}
url = $${slap-connection:server-url}
key = $${slap-connection:key-file}
cert = $${slap-connection:cert-file}
[rootdirectory]
recipe = slapos.cookbook:mkdirectory
bin = $${buildout:directory}/bin
etc = $${buildout:directory}/etc
services = $${rootdirectory:etc}/run/
promises = $${rootdirectory:etc}/promise/
var = $${buildout:directory}/var
[symlinks]
recipe = cns.recipe.symlink
symlink_target = $${rootdirectory:bin}
symlink_base = ${postgresql:location}/bin
[postgres-instance]
# create cluster, configuration files and a database
recipe = slapos.cookbook:postgres
# Options
ipv6_host = $${slap-network-information:global-ipv6}
user = mioga
port = 5432
dbname = mioga2
# pgdata_directory is created by initdb, and should not exist beforehand.
pgdata-directory = $${rootdirectory:var}/data
services = $${rootdirectory:services}
bin = $${rootdirectory:bin}
# Deploy promises scripts
[postgres-promise]
recipe = slapos.cookbook:check_port_listening
path = $${rootdirectory:promises}/postgres
hostname = $${slap-network-information:global-ipv6}
port = $${postgres-instance:port}
[publish]
recipe = slapos.cookbook:publishurl
url = $${postgres-instance:url}
[buildout]
parts =
switch-softwaretype
eggs-directory = ${buildout:eggs-directory}
develop-eggs-directory = ${buildout:develop-eggs-directory}
offline = true
[switch-softwaretype]
recipe = slapos.cookbook:softwaretype
default = ${template-apacheperl:output}
postgres = ${template-postgres:output}
import fnmatch
import grp
import os
import pprint
import pwd
import re
import shutil
import sys


class FileModifier:
  def __init__(self, filename):
    self.filename = filename
    f = open(filename, 'rb')
    self.content = f.read()
    f.close()

  def modify(self, key, value):
    (self.content, count) = re.subn(
      r'(<parameter[^>]*\sname\s*=\s*"' + re.escape(key) + r'"[^>]*\sdefault\s*=\s*")[^"]*',
      r"\g<1>" + value,
      self.content)
    return count

  def save(self):
    f = open(self.filename, 'w')
    f.write(self.content)
    f.close()


def pre_configure_hook(options, bo, env):
  location = options['location']
  # TODO: double-check which one of these values must be set
  # at instantiation time!
  fm = FileModifier('conf/Config.xml')
  fm.modify('apache_user', pwd.getpwuid(os.getuid())[0])
  fm.modify('apache_group', grp.getgrgid(os.getgid())[0])
  mioga_base = os.path.join(location, 'var', 'lib', 'Mioga2')
  fm.modify('install_dir', mioga_base)
  fm.modify('tmp_dir', os.path.join(mioga_base, 'tmp'))
  fm.modify('search_tmp_dir', os.path.join(mioga_base, 'mioga_search'))
  fm.modify('maildir', os.path.join(location, 'var', 'spool', 'mioga', 'maildir'))
  fm.modify('maildirerror', os.path.join(location, 'var', 'spool', 'mioga', 'error'))
  fm.modify('mailfifo', os.path.join(location, 'var', 'spool', 'mioga', 'fifo'))
  fm.save()

  # TODO: mail settings are certainly wrong, what is the domain name?

  # Correct shebangs to the right Perl
  for root, dirnames, filenames in os.walk('.'):
    for filename in fnmatch.filter(filenames, '*.pl'):
      with open(os.path.join(root, filename), "r+") as f:
        lines = f.readlines()
        (lines[0], count) = re.subn(r'^#!/usr/bin/perl',
                                    '#!' + options['perl-binary'],
                                    lines[0], 1)
        if count > 0:
          f.seek(0)
          f.writelines(lines)
          print "Corrected interpreter for script " + filename

# def post_make_hook(options, buildout):
#   location = options['location']
#   print "Mioga - postmakehook"
#   print "We are currently in", os.getcwd()
#   shutil.move("var", location)
#   return None
[buildout]
develop =
/srv/slapgrid/slappart9/srv/runner/project/slapos
/opt/slapdev_build
extends =
../../component/rsync/buildout.cfg
../../component/apache-perl/buildout.cfg
../../component/perl-XML-Parser/buildout.cfg
../../component/perl-XML-LibXML/buildout.cfg
../../component/perl-Term-ReadLine-Gnu/buildout.cfg
../../component/perl-Image-Magick/buildout.cfg
../../component/postgresql/buildout.cfg
../../component/libxslt/buildout.cfg
../../component/dcron/buildout.cfg
../../component/dropbear/buildout.cfg
../../component/lxml-python/buildout.cfg
../../stack/slapos.cfg
parts =
eggs
apache-perl
perl-Apache2-Request
perl-Crypt-SSLeay
perl-DBD-Pg
perl-XML-Parser
perl-XML-LibXML
perl-XML-LibXSLT
perl-Term-ReadLine-Gnu
perl-Text-Iconv
perl-Image-Magick
perl-String-Checker-mioga
perl-Search-Xapian
cpan-simple-modules
rsync
mioga
template
template-apacheperl
template-postgres
# These were unmaintained components, inlined here.
# slapos.recipe.build:cpan no longer exists; we now use the perl-CPAN-package macro instead.
# ../../component/perl-Crypt-SSLeay/buildout.cfg
[buildout]
extends +=
../../component/openssl/buildout.cfg
../../component/zlib/buildout.cfg
[perl-Crypt-SSLeay]
recipe = slapos.recipe.build:cpan
modules =
G/GA/GAAS/URI-1.60.tar.gz
N/NA/NANIS/Crypt-SSLeay-0.64.tar.gz
cpan-configuration =
make_arg=('OTHERLDFLAGS="-L${zlib:location}/lib -Wl,-R${zlib:location}/lib -L${openssl:location}/lib -Wl,-R${openssl:location}/lib"')
makepl_arg=('INC=-I${openssl:location}/include')
environment =
OPENSSL_PREFIX=${openssl:location}
perl = perl
# ../../component/perl-XML-LibXSLT/buildout.cfg
[buildout]
extends +=
../libxslt/buildout.cfg
../libxml2/buildout.cfg
../zlib/buildout.cfg
[perl-XML-LibXSLT]
recipe = slapos.recipe.build:cpan
cpan-configuration =
makepl_arg='LIBS="-L${libxslt:location}/lib -Wl,-R${libxslt:location}/lib -L${zlib:location}/lib -Wl,-R${zlib:location}/lib -L${libxml2:location}/lib -Wl,-R${libxml2:location}/lib" INC="-I${libxslt:location}/include -I${libxml2:location}/include/libxml2"'
modules =
S/SH/SHLOMIF/XML-LibXSLT-1.78.tar.gz
perl = perl
# ../../component/perl-Text-Iconv/buildout.cfg
[buildout]
extends +=
../libiconv/buildout.cfg
[perl-Text-Iconv]
recipe = slapos.recipe.build:cpan
cpan-configuration =
makepl_arg='LIBS="-L${libiconv:location}/lib -Wl,-R${libiconv:location}/lib" INC="-I${libiconv:location}/include"'
modules =
M/MP/MPIOTR/Text-Iconv-1.7.tar.gz
perl = perl
# ../../component/perl-DBD-Pg/buildout.cfg
[buildout]
extends +=
../postgresql/buildout.cfg
[perl-DBD-Pg]
recipe = slapos.recipe.build:cpan
modules =
T/TU/TURNSTEP/DBD-Pg-2.19.3.tar.gz
environment =
POSTGRES_HOME=${postgresql:location}
perl = perl
# ../../component/perl-Search-Xapian/buildout.cfg
[buildout]
extends +=
../xapian/buildout.cfg
[perl-Search-Xapian]
recipe = slapos.recipe.build:cpan
modules =
O/OL/OLLY/Search-Xapian-1.2.10.0.tar.gz
environment =
XAPIAN_CONFIG=${xapian:location}/bin/xapian-config
perl = perl
[eggs]
recipe = zc.recipe.egg
eggs =
${lxml-python:egg}
slapos.cookbook
cns.recipe.symlink
# override perl here to keep using 5.14.x.
[perl]
recipe = hexagonit.recipe.cmmi
version = 5.14.2
url = http://www.cpan.org/src/5.0/perl-${:version}.tar.bz2
md5sum = 04a4c5d3c1f9f19d77daff8e8cd19a26
siteprefix = ${buildout:parts-directory}/site_${:_buildout_section_name_}
patch-options = -p1
patches =
${perl-keep-linker-flags-in-ldflags.patch:location}/${perl-keep-linker-flags-in-ldflags.patch:filename}
# Viktor has adapted the following commands for AMD64 compilation
# TODO: find out how we can write generic code that suits all architectures
configure-command =
sh Configure -des \
-A ccflags=-fPIC \
-Dprefix=${buildout:parts-directory}/${:_buildout_section_name_} \
-Dsiteprefix=${:siteprefix} \
-Dcflags=-I${gdbm:location}/include \
-Dldflags="-L${gdbm:location}/lib -Wl,-rpath=${gdbm:location}/lib" \
-Ui_db \
-Dnoextensions=ODBM_File \
-Dusethreads
environment =
PATH=${patch:location}/bin:%(PATH)s
CFLAGS='-m64 -mtune=nocona'
post-make-hook = ${perl-postmakehook-download:location}/${perl-postmakehook-download:filename}:post_make_hook
[cpan-simple-modules]
recipe = slapos.recipe.build:cpan
modules =
S/ST/STBEY/Date-Calc-6.3.tar.gz
D/DC/DCOPPIT/Benchmark-Timer-0.7102.tar.gz
R/RB/RBOW/Date-ICal-2.678.tar.gz
S/SB/SBECK/Date-Manip-6.37.tar.gz
G/GB/GBARR/TimeDate-1.20.tar.gz
S/SH/SHLOMIF/Error-0.17018.tar.gz
P/PA/PARDUS/File-MimeInfo/File-MimeInfo-0.16.tar.gz
O/OV/OVID/HTML-TokeParser-Simple-3.15.tar.gz
D/DS/DSKOLL/MIME-tools-5.503.tar.gz
D/DS/DSKOLL/IO-stringy-2.110.tar.gz
C/CO/COSIMO/HTTP-DAV-0.47.tar.gz
M/MA/MARSCHAP/perl-ldap-0.48.tar.gz
F/FD/FDESAR/Parse-Yapp-1.05.tar.gz
S/SN/SNOWHARE/Unicode-MapUTF8-1.11.tar.gz
G/GA/GAAS/Unicode-String-2.09.tar.gz
T/TY/TYEMQ/Algorithm-Diff-1.1902.tar.gz
J/JG/JGMYERS/Encode-Detect-1.01.tar.gz
G/GU/GUIDO/libintl-perl-1.20.tar.gz
K/KE/KEN/XML-XML2JSON-0.06.tar.gz
A/AR/ARISTOTLE/XML-Atom-SimpleFeed-0.86.tar.gz
P/PE/PETDANCE/Test-WWW-Mechanize-1.44.tar.gz
G/GR/GRANTM/XML-Simple-2.20.tar.gz
A/AD/ADAMK/Archive-Zip-1.30.tar.gz
D/DU/DURIST/Proc-ProcessTable-0.45.tar.gz
S/SU/SULLR/Net-INET6Glue-0.5.tar.gz
perl = perl
cpan-configuration =
makepl_arg=''
make_arg=''
[perl-String-Checker-mioga]
recipe = hexagonit.recipe.cmmi
url = http://packages.alixen.org/contribs/String-Checker-0.03.tar.gz
md5sum = c750a33505609544f95eace7a2896c84
configure-command =
${perl:location}/bin/perl Makefile.PL
[mioga]
recipe = hexagonit.recipe.cmmi
version = 2.4.16
# No use re-using "version", the whole URL will change for the next one
url = http://www.alixen.org/attachments/download/89/Mioga2-2.4.16.tar.gz
md5sum = 1d2e76c798ee6d5f233011997200e125
location = ${buildout:parts-directory}/${:_buildout_section_name_}
buildinst = ${mioga:location}/buildinst
static = ${mioga:location}/static
environment =
MIOGA_BASE=${mioga:location}
MIOGA_BUILDINST=${mioga:buildinst}
MIOGA_STATIC=${mioga:static}
MIOGA_SITEPERL=${perl:siteprefix}
PATH=${libxslt:location}/bin:${libxml2:location}/bin:${perl:location}/bin:${perl:siteprefix}/bin:${rsync:location}/bin:%(PATH)s
patch-options = -p1
patches =
${mioga-patch:location}/${mioga-patch:filename}
# post-make-hook = ${mioga-postmakehook:location}/${mioga-postmakehook:filename}:post_make_hook
pre-configure-hook = ${mioga-hooks:location}/${mioga-hooks:filename}:pre_configure_hook
configure-command =
${perl:location}/bin/perl Makefile.PL
make-targets =
slapos-compilation
keep-compile-dir = true
perl-binary = ${perl:location}/bin/perl
[mioga-patch]
recipe = hexagonit.recipe.download
url = ${:_profile_base_location_}/${:filename}
# md5sum = b836ad89902d1ea68b091a5b9800edd8
download-only = true
filename = ${:_buildout_section_name_}
[mioga-hooks]
recipe = hexagonit.recipe.download
url = ${:_profile_base_location_}/${:filename}
# md5sum = c7ceec7788749238cb5fbe09beb647b1
download-only = true
filename = mioga-hooks.py
[template]
recipe = slapos.recipe.template
url = ${:_profile_base_location_}/instance.cfg
# md5sum =
output = ${buildout:directory}/template.cfg
mode = 0644
[template-apacheperl]
recipe = slapos.recipe.template
url = ${:_profile_base_location_}/instance-apacheperl.cfg
# md5sum =
output = ${buildout:directory}/template-apacheperl.cfg
mode = 0644
compile-directory = ${mioga:compile-directory}/Mioga2-${mioga:version}
[template-postgres]
recipe = slapos.recipe.template
url = ${:_profile_base_location_}/instance-postgres.cfg
# md5sum =
output = ${buildout:directory}/template-postgres.cfg
mode = 0644
# Exactly the same as software.cfg, but fetches slapos.cookbook and
# slapos.toolbox from their git repositories instead of the stable versions,
# allowing to play with a bleeding-edge environment.
# You'll need to run buildout twice for this profile.
[buildout]
extends =
../../component/git/buildout.cfg
software.cfg
parts +=
# Development parts
slapos.cookbook-repository
slapos.core-repository
slapos.toolbox-repository
check-recipe
develop =
${:parts-directory}/slapos.cookbook-repository
${:parts-directory}/slapos.core-repository
${:parts-directory}/slapos.toolbox-repository
[slapos.toolbox-repository]
recipe = slapos.recipe.build:gitclone
repository = https://lab.nexedi.com/nexedi/slapos.toolbox.git
branch = master
git-executable = ${git:location}/bin/git
[slapos.cookbook-repository]
recipe = slapos.recipe.build:gitclone
repository = https://lab.nexedi.com/nexedi/slapos.git
branch = tt-rss
git-executable = ${git:location}/bin/git
[slapos.core-repository]
recipe = slapos.recipe.build:gitclone
repository = https://lab.nexedi.com/nexedi/slapos.core.git
branch = master
git-executable = ${git:location}/bin/git
[check-recipe]
recipe = plone.recipe.command
stop-on-error = true
update-command = ${:command}
command =
grep parts ${buildout:develop-eggs-directory}/slapos.cookbook.egg-link &&
grep parts ${buildout:develop-eggs-directory}/slapos.core.egg-link &&
grep parts ${buildout:develop-eggs-directory}/slapos.toolbox.egg-link
[versions]
slapos.cookbook =
slapos.toolbox =
slapos.core =
[buildout]
# Run a SQL script to populate DB at first run
[tt-rss-init]
recipe = slapos.cookbook:apachephpconfigure
table_name = db.ttrss_users
#constraint = `pn_id`>0
lampconfigure = ${buildout:bin-directory}/lampconfigure
htdocs = $${apache-php:htdocs}
mysql-username = $${apache-php:mysql-username}
mysql-password = $${apache-php:mysql-password}
mysql-database = $${apache-php:mysql-database}
mysql-host = $${apache-php:mysql-host}
mysql-port = $${apache-php:mysql-port}
configureinstall-location = $${basedirectory:scripts}/configureInstall
sql-script = ${sql-script:location}/${sql-script:filename}
# Give the correct information to apache recipe to put the hostname in the
# tt-rss config file
[apache-php]
application-url = $${request-frontend:connection-site_url}
[buildout]
extends =
../../stack/lamp/buildout.cfg
[versions]
slapos.cookbook =
[application]
recipe = slapos.recipe.build:download-unpacked
url = https://github.com/gothfox/Tiny-Tiny-RSS/archive/1.7.8.tar.gz
md5sum = efd7eec1629db379896fb7e74bba400e
strip-top-level-dir = true
[application-template]
recipe = slapos.recipe.build:download
url = ${:_profile_base_location_}/template/config.php.in
#md5sum = Student may put here the md5sum of this file, this is a good idea
filename = template.in
mode = 0644
location = ${buildout:parts-directory}/${:_buildout_section_name_}
[application-configuration]
location = config.php
[sql-script]
recipe = slapos.recipe.build:download
location = ${buildout:parts-directory}/${:_buildout_section_name_}
url = ${:_profile_base_location_}/script/tt-rss.sql
#md5sum = c4d5f87d8f02cad3f20e679160195f48
filename = tt-rss.sql
mode = 0744
# XXX Should disappear and be integrated into apachephpconfigure
[configure-script]
recipe = slapos.recipe.build:download
location = ${buildout:parts-directory}/${:_buildout_section_name_}
url = ${:_profile_base_location_}/configure-tt-rss.py
#md5sum = c4d5f87d8f02cad3f20e679160195f48
filename = configure-tt-rss.py
mode = 0744
[custom-application-deployment-template]
recipe = slapos.recipe.template
url = ${:_profile_base_location_}/instance-custom.cfg.in
output = ${buildout:directory}/instance-custom.cfg
#md5sum = 283cb53ff8cd34635703e771062db919
mode = 0644
[custom-application-deployment]
path = ${custom-application-deployment-template:output}
part-list = tt-rss-init
<?php
// *******************************************
// *** Database configuration (important!) ***
// *******************************************
define('DB_TYPE', "mysql"); // or pgsql
define('DB_HOST', "%(mysql_host)s");
define('DB_USER', "%(mysql_user)s");
define('DB_NAME', "%(mysql_database)s");
define('DB_PASS', "%(mysql_password)s");
define('MYSQL_CHARSET', 'UTF8');
// Connection charset for MySQL. If you have a legacy database and/or experience
// garbage unicode characters with this option, try setting it to a blank string.
// ***********************************
// *** Basic settings (important!) ***
// ***********************************
define('SELF_URL_PATH', '%(url)s');
// Full URL of your tt-rss installation. This should be set to the
// location of tt-rss directory, e.g. http://yourserver/tt-rss/
// You need to set this option correctly otherwise several features
// including PUSH, bookmarklets and browser integration will not work properly.
define('SINGLE_USER_MODE', true);
// Operate in single user mode, disables all functionality related to
// multiple users.
define('SIMPLE_UPDATE_MODE', false);
// Enables fallback update mode where tt-rss tries to update feeds in
// background while tt-rss is open in your browser.
// If you don't have a lot of feeds and don't want to or can't run
// background processes while not running tt-rss, this method is generally
// viable to keep your feeds up to date.
// Still, there are more robust (and recommended) updating methods
// available, you can read about them here: http://tt-rss.org/wiki/UpdatingFeeds
// *****************************
// *** Files and directories ***
// *****************************
//FIXME
define('PHP_EXECUTABLE', '/usr/bin/php');
// Path to PHP executable, used for various command-line tt-rss programs
define('LOCK_DIRECTORY', 'lock');
// Directory for lockfiles, must be writable to the user you run
// daemon process or cronjobs under.
define('CACHE_DIR', 'cache');
// Local cache directory for RSS feed content.
define('ICONS_DIR', "feed-icons");
define('ICONS_URL', "feed-icons");
// Local and URL path to the directory, where feed favicons are stored.
// Unless you really know what you're doing, please keep those relative
// to tt-rss main directory.
// **********************
// *** Authentication ***
// **********************
// Please see PLUGINS below to configure various authentication modules.
define('AUTH_AUTO_CREATE', true);
// Allow authentication modules to auto-create users in tt-rss internal
// database when authenticated successfully.
define('AUTH_AUTO_LOGIN', true);
// Automatically login user on remote or other kind of externally supplied
// authentication, otherwise redirect to login form as normal.
// If set to true, users won't be able to set application language
// and settings profile.
// *********************
// *** Feed settings ***
// *********************
define('FORCE_ARTICLE_PURGE', 0);
// When this option is not 0, users ability to control feed purging
// intervals is disabled and all articles (which are not starred)
// older than this amount of days are purged.
// *** PubSubHubbub settings ***
define('PUBSUBHUBBUB_HUB', '');
// URL to a PubSubHubbub-compatible hub server. If defined, "Published
// articles" generated feed would automatically become PUSH-enabled.
define('PUBSUBHUBBUB_ENABLED', false);
// Enable client PubSubHubbub support in tt-rss. When disabled, tt-rss
// won't try to subscribe to PUSH feed updates.
// *********************
// *** Sphinx search ***
// *********************
define('SPHINX_ENABLED', false);
// Enable fulltext search using Sphinx (http://www.sphinxsearch.com)
// Please see http://tt-rss.org/wiki/SphinxSearch for more information.
define('SPHINX_INDEX', 'ttrss');
// Index name in Sphinx configuration. You can specify multiple indexes
// as a comma-separated string.
// ***********************************
// *** Self-registrations by users ***
// ***********************************
define('ENABLE_REGISTRATION', false);
// Allow users to register themselves. Please be wary that allowing
// random people to access your tt-rss installation is a security risk
// and potentially might lead to data loss or server exploit. Disabled
// by default.
define('REG_NOTIFY_ADDRESS', 'user@your.domain.dom');
// Email address to send new user notifications to.
define('REG_MAX_USERS', 10);
// Maximum amount of users which will be allowed to register on this
// system. 0 - no limit.
// **********************************
// *** Cookies and login sessions ***
// **********************************
define('SESSION_COOKIE_LIFETIME', 86400*30);
// Default lifetime of a session (e.g. login) cookie. In seconds,
// 0 means cookie will be deleted when browser closes.
// Setting this to zero will affect several user preferences
// like widescreen mode not saving.
define('SESSION_EXPIRE_TIME', 86400*30);
// Hard expiration limit for sessions. Should be
// greater or equal to SESSION_COOKIE_LIFETIME
define('SESSION_CHECK_ADDRESS', 1);
// Check client IP address when validating session:
// 0 - disable checking
// 1 - check first 3 octets of an address (recommended)
// 2 - check first 2 octets of an address
// 3 - check entire address
// *********************************
// *** Email and digest settings ***
// *********************************
define('SMTP_FROM_NAME', 'Tiny Tiny RSS');
define('SMTP_FROM_ADDRESS', 'noreply@your.domain.dom');
// Name, address and subject for sending outgoing mail. This applies
// to password reset notifications, digest emails and any other mail.
define('DIGEST_SUBJECT', '[tt-rss] New headlines for last 24 hours');
// Subject line for email digests
define('SMTP_HOST', '');
// SMTP Host to send outgoing mail. Blank - use system MTA.
define('SMTP_PORT','');
// SMTP port to send outgoing mail. Default is 25.
define('SMTP_LOGIN', '');
define('SMTP_PASSWORD', '');
// These two options enable SMTP authentication when sending
// outgoing mail. Only used with SMTP_HOST
// ***************************************
// *** Other settings (less important) ***
// ***************************************
define('CHECK_FOR_NEW_VERSION', false);
// Check for new versions of tt-rss automatically.
define('ENABLE_GZIP_OUTPUT', false);
// Selectively gzip output to improve wire performance. This requires
// PHP Zlib extension on the server.
// Enabling this can break tt-rss in several httpd/php configurations,
// if you experience weird errors and tt-rss failing to start, blank pages
// after login, or content encoding errors, disable it.
define('PLUGINS', 'auth_remote, auth_internal, note');
// Comma-separated list of plugins to load automatically for all users.
// System plugins have to be specified here. Please enable at least one
// authentication plugin here (auth_*).
// Users may enable other user plugins from Preferences/Plugins but may not
// disable plugins specified in this list.
define('CONFIG_VERSION', 26);
// Expected config version. Please update this option in config.php
// if necessary (after migrating all new options from this file).
// vim:ft=php
?>
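
The %(...)s placeholders above are ordinary Python string-formatting keys that get filled in at instantiation time; a minimal sketch of that kind of substitution (hypothetical values, not the literal apachephp recipe code):

    # Render config.php from the config.php.in template above.
    with open('config.php.in') as f:
        template = f.read()
    with open('config.php', 'w') as f:
        f.write(template % {
            'mysql_host': '127.0.0.1',
            'mysql_user': 'user',
            'mysql_password': 'secret',
            'mysql_database': 'ttrss',
            'url': 'https://example.org/tt-rss/',
        })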