Commit dc5d3a88 authored by Jérome Perrin

Postgres recipe cleanup

* use plain strings for options instead of `sets`, which require the `'!py!'` syntax and only work on a patched buildout (see the example configuration after this message)
* support regenerating the configuration, for example when the IP changes
* remove the backup part of the recipe, which we don't use and which looks buggy
* fix python3 support

See merge request !727
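For illustration, a buildout part using the cleaned-up recipe can now pass every option as a plain string, with multi-line values for lists of addresses. This is only a sketch: the part name, addresses, password and directory values are invented; the option names are the ones documented in the recipe docstring below, and `slapos.cookbook:postgres` is the entry point registered in setup.py.

```ini
[postgres-instance]
recipe = slapos.cookbook:postgres
# addresses are plain (possibly multi-line, possibly empty) strings,
# no '!py!' set syntax and no patched buildout needed
ipv4 =
ipv6 =
    2001:db8::1
    2001:db8::2
port = 5432
dbname = exampledb
superuser = exampleuser
password = example-password
# illustrative paths
pgdata-directory = ${buildout:directory}/srv/postgresql
services = ${buildout:directory}/etc/service
```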
parents d783653f 8f35e4a3
@@ -141,8 +141,6 @@ setup(name=name,
         'onetimeupload = slapos.recipe.onetimeupload:Recipe',
         'pbs = slapos.recipe.pbs:Recipe',
         'postgres = slapos.recipe.postgres:Recipe',
-        'postgres.export = slapos.recipe.postgres.backup:ExportRecipe',
-        'postgres.import = slapos.recipe.postgres.backup:ImportRecipe',
         'proactive = slapos.recipe.proactive:Recipe',
         'promise.plugin= slapos.recipe.promise_plugin:Recipe',
         'publish = slapos.recipe.publish:Recipe',
...
@@ -42,7 +42,8 @@ class Recipe(GenericBaseRecipe):
     - a Postgres cluster
     - configuration to allow connections from IPv4, IPv6 or unix socket.
-    - a superuser with provided name and generated password
+      IPv4 and IPv6 can be disabled, unix socket will always be available.
+    - a superuser with provided name and password
     - a database with provided name
     - a start script in the services directory
@@ -52,50 +53,59 @@ class Recipe(GenericBaseRecipe):
     dbname
         name of the database to be used by the application.
     ipv4
-        set of ipv4 to listen on.
+        ipv4 to listen on, can be multiple ips or can be empty.
     ipv6
-        set of ipv6 to listen on.
+        ipv6 to listen on, can be multiple ips or can be empty.
+    port
+        port to listen on, same for both IPv4 and IPv6.
     pgdata-directory
         path to postgres configuration and data.
     services
         must be ${buildout:directory}/etc/service.
     superuser
         name of the superuser to create.
+    password
+        password for the superuser.

     Exposed options:
-    password
-        generated password for the superuser.
     url
-        generated DBAPI connection string.
+        generated DBAPI connection string, on IPv6.
         it can be used as-is (ie. in sqlalchemy) or by the _urlparse.py recipe.
+        this is only available if at least one IPv6 was provided.
     """

     def _options(self, options):
-        options['url'] = 'postgresql://%(superuser)s:%(password)s@[%(ipv6-random)s]:%(port)s/%(dbname)s' % options
+        if options.get('ipv6'):
+            options['url'] = "postgresql://{superuser}:{password}@[{ipv6}]:{port}/{dbname}".format(
+                superuser=options['superuser'],
+                password=options['password'],
+                ipv6=options['ipv6'].splitlines()[0],
+                port=options['port'],
+                dbname=options['dbname'],
+            )

     def install(self):
         pgdata = self.options['pgdata-directory']
-        # if the pgdata already exists, skip all steps, we don't need to do anything.
+        paths = []
+        # if the pgdata already exists, we don't need to recreate databases.
         if not os.path.exists(pgdata):
             try:
                 self.createCluster()
-                self.createConfig()
+                paths.extend(self.createConfig())
                 self.createDatabase()
                 self.updateSuperuser()
-                self.createRunScript()
+                paths.extend(self.createRunScript())
             except:
                 # do not leave half-installed postgresql - else next time we
                 # run we won't update it.
                 shutil.rmtree(pgdata)
                 raise
         else:
-            self.createConfig()
-            self.createRunScript()
-        return []
+            paths.extend(self.createConfig())
+            paths.extend(self.createRunScript())
+        return paths

     update = install
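As a quick illustration of the new `_options` behaviour, here is a standalone sketch. The option values are invented; only the resulting URL format is what the recipe documents (DBAPI connection string built from the first address of a possibly multi-line `ipv6` option).

```python
# Standalone sketch of the URL built in _options above; values are invented.
options = {
    'superuser': 'exampleuser',
    'password': 'example-password',
    'ipv6': '2001:db8::1\n2001:db8::2',  # multi-line string option
    'port': '5432',
    'dbname': 'exampledb',
}

if options.get('ipv6'):
    url = "postgresql://{superuser}:{password}@[{ipv6}]:{port}/{dbname}".format(
        superuser=options['superuser'],
        password=options['password'],
        ipv6=options['ipv6'].splitlines()[0],  # only the first address is published
        port=options['port'],
        dbname=options['dbname'],
    )
    print(url)  # -> postgresql://exampleuser:example-password@[2001:db8::1]:5432/exampledb
```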
@@ -129,10 +139,11 @@ class Recipe(GenericBaseRecipe):
     def createConfig(self):
         pgdata = self.options['pgdata-directory']
-        ipv4 = self.options['ipv4']
-        ipv6 = self.options['ipv6']
+        ipv4 = self.options['ipv4'].splitlines()
+        ipv6 = self.options['ipv6'].splitlines()

-        with open(os.path.join(pgdata, 'postgresql.conf'), 'wb') as cfg:
+        postgres_conf = os.path.join(pgdata, 'postgresql.conf')
+        with open(postgres_conf, 'w') as cfg:
             cfg.write(textwrap.dedent("""\
                 listen_addresses = '%s'
                 logging_collector = on
@@ -149,11 +160,12 @@
                 unix_socket_directories = '%s'
                 unix_socket_permissions = 0700
                 """ % (
-                    ','.join(ipv4.union(ipv6)),
+                    ','.join(set(ipv4).union(ipv6)),
                     pgdata,
                 )))

-        with open(os.path.join(pgdata, 'pg_hba.conf'), 'wb') as cfg:
+        pg_hba_conf = os.path.join(pgdata, 'pg_hba.conf')
+        with open(pg_hba_conf, 'w') as cfg:
             # see http://www.postgresql.org/docs/9.2/static/auth-pg-hba-conf.html
             cfg_lines = [
@@ -174,7 +186,7 @@
                 cfg_lines.append('host all all %s/%s md5' % (ip, ipv6_netmask_bits))
             cfg.write('\n'.join(cfg_lines))
+        return postgres_conf, pg_hba_conf

     def createDatabase(self):
         self.runPostgresCommand(cmd='CREATE DATABASE "%s"' % self.options['dbname'])
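For context, this is roughly what createConfig now produces when `ipv4 = 127.0.0.1` and `ipv6 = ::1`. The excerpt is illustrative: settings unchanged by the hunks above are omitted, the pgdata path is invented, and the `/128` netmask is an assumption for the `ipv6_netmask_bits` constant not shown in this diff.

```
# postgresql.conf (excerpt) -- listen_addresses joins the ipv4 and ipv6 options
listen_addresses = '127.0.0.1,::1'
logging_collector = on
unix_socket_directories = '/srv/example/pgdata'   # the pgdata-directory option
unix_socket_permissions = 0700

# pg_hba.conf (excerpt) -- one "host ... md5" line per configured address
# (similar lines are generated for the IPv4 addresses)
host all all ::1/128 md5
```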
@@ -232,6 +244,6 @@
             -D %(pgdata-directory)s
             """ % self.options)
         name = os.path.join(self.options['services'], 'postgres-start')
-        self.createExecutable(name, content=content)
+        return [self.createExecutable(name, content=content)]
##############################################################################
#
# Copyright (c) 2010 Vifib SARL and Contributors. All Rights Reserved.
#
# WARNING: This program as such is intended to be used by professional
# programmers who take the whole responsibility of assessing all potential
# consequences resulting from its eventual inadequacies and bugs
# End users who are looking for a ready-to-use solution with commercial
# guarantees and support are strongly adviced to contract a Free Software
# Service Company
#
# This program is Free Software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 3
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
##############################################################################
import textwrap

from slapos.recipe.librecipe import GenericBaseRecipe


class ExportRecipe(GenericBaseRecipe):
    """\
    This recipe creates an exporter script for using with the resilient stack.

    Required options:
    backup-directory
        folder that will contain the dump file.
    bin
        path to the 'pg_dump' binary.
    dbname
        name of the database to dump.
    pgdata-directory
        path to postgres configuration and data.
    wrapper
        full path of the exporter script to create.
    """

    def install(self):
        wrapper = self.options['wrapper']
        self.createBackupScript(wrapper)
        return [wrapper]

    def createBackupScript(self, wrapper):
        """\
        Create a script to backup the database in 'custom' format.
        """
        content = textwrap.dedent("""\
            #!/bin/sh
            umask 077
            %(bin)s/pg_dump \\
                    --host=%(pgdata-directory)s \\
                    --username postgres \\
                    --format=custom \\
                    --file=%(backup-directory)s/database.dump \\
                    %(dbname)s
            """ % self.options)
        self.createExecutable(wrapper, content=content)
class ImportRecipe(GenericBaseRecipe):
    """\
    This recipe creates an importer script for using with the resilient stack.

    Required options:
    backup-directory
        folder that contains the dump file.
    bin
        path to the 'pg_restore' binary.
    dbname
        name of the database to restore.
    pgdata-directory
        path to postgres configuration and data.
    wrapper
        full path of the importer script to create.
    """

    def install(self):
        wrapper = self.options['wrapper']
        self.createRestoreScript(wrapper)
        return [wrapper]

    def createRestoreScript(self, wrapper):
        """\
        Create a script to restore the database from 'custom' format.
        """
        content = textwrap.dedent("""\
            #!/bin/sh
            %(bin)s/pg_restore \\
                    --host=%(pgdata-directory)s \\
                    --username postgres \\
                    --dbname=%(dbname)s \\
                    --clean \\
                    --no-owner \\
                    --no-acl \\
                    %(backup-directory)s/database.dump
            """ % self.options)
        self.createExecutable(wrapper, content=content)
import unittest
import tempfile
import shutil
import os.path

import zc.buildout.testing


class PostgresTest(unittest.TestCase):
    def setUp(self):
        self.buildout = buildout = zc.buildout.testing.Buildout()
        self.pgdata_directory = tempfile.mkdtemp()
        self.addCleanup(shutil.rmtree, self.pgdata_directory)
        self.services_directory = tempfile.mkdtemp()
        self.addCleanup(shutil.rmtree, self.services_directory)
        buildout['postgres'] = {
            'bin': 'software/parts/postgres/bin/',
            'dbname': 'dbname',
            'ipv4': '127.0.0.1',
            'ipv6': '::1',
            'port': '5443',
            'pgdata-directory': self.pgdata_directory,
            'services': self.services_directory,
            'superuser': 'superuser',
            'password': 'secret',
        }
        from slapos.recipe import postgres
        self.recipe = postgres.Recipe(
            buildout,
            'postgres',
            buildout['postgres'])

    def test_options(self):
        self.assertEqual(
            'postgresql://superuser:secret@[::1]:5443/dbname',
            self.buildout['postgres']['url'])

    def test_install(self):
        installed = self.recipe.install()

        self.assertIn('postgresql.conf', os.listdir(self.pgdata_directory))
        self.assertIn('pg_hba.conf', os.listdir(self.pgdata_directory))
        self.assertIn('postgres-start', os.listdir(self.services_directory))
        self.assertEqual(
            sorted(installed),
            sorted([
                os.path.join(self.pgdata_directory, 'postgresql.conf'),
                os.path.join(self.pgdata_directory, 'pg_hba.conf'),
                os.path.join(self.services_directory, 'postgres-start')]))
@@ -33,8 +33,6 @@ The backup data is automatically used to build an historical, incremental archiv
 export
 ------

-example:
-https://lab.nexedi.com/nexedi/slapos/blob/HEAD/stack/lapp/postgres/instance-postgres-export.cfg.in

 This is the *active* instance - the one providing live data to the application.
@@ -45,7 +43,6 @@ A backup is run via the bin/exporter script: it will
 The pull-backup, upon receiving the notification, will make a copy of the data and transmit it to the 'import' instances.

 You should provide the bin/{mysoftware}-exporter script, see for instance
-https://lab.nexedi.com/nexedi/slapos/blob/HEAD/slapos/recipe/postgres/__init__.py#L207
 https://lab.nexedi.com/nexedi/slapos/blob/1.0.142/slapos/recipe/mydumper.py#L71

 By default, as defined in
@@ -57,9 +54,6 @@ the bin/exporter script is run every 60 minutes.
 import
 ------

-example:
-https://lab.nexedi.com/nexedi/slapos/blob/HEAD/stack/lapp/postgres/instance-postgres-import.cfg.in

 This is the *fallback* instance - the one that can be activated and thus become active.
 Any number of import instances can be used. Deciding which one should take over can be done manually
 or through a monitoring + election script.
@@ -67,7 +61,6 @@ or through a monitoring + election script.
 You should provide the bin/{mysoftware}-importer script, see for instance
-https://lab.nexedi.com/nexedi/slapos/blob/HEAD/slapos/recipe/postgres/__init__.py#L233
 https://lab.nexedi.com/nexedi/slapos/blob/1.0.142/slapos/recipe/mydumper.py#L71
...