Commit 88652000 authored by Jason Madden

Remove old release scripts that no longer work and document the new workflow in development.rst.

[skip ci]
parent f9e578fc
......@@ -3,7 +3,7 @@ recursive-include examples *
recursive-include src/gevent *
recursive-include docs *
recursive-include deps *
recursive-include util *
include LICENSE
include NOTICE
......
......@@ -205,3 +205,34 @@ monitor test coverage.
.. _coverage.py: https://pypi.python.org/pypi/coverage/
.. _coveralls.io: https://coveralls.io/github/gevent/gevent
.. _AppVeyor: https://ci.appveyor.com/project/denik/gevent
Releasing gevent
================
.. note:: This is a semi-organized collection of notes for gevent
maintainers.
gevent is released using `zest.releaser
<https://pypi.org/project/zest.releaser/>`_. The general flow is
something like this:
1. Push all relevant changes to master.
2. From the gevent working copy, run ``prerelease``. Fix any issues it
brings up. Let it bump the version number (or enter the correct
one) and commit.
3. Run ``release``. Let it create the tag and commit it; let it create
an sdist, but **do not** let it upload it.
4. Push the tag and master to github.
5. Let appveyor build the tag. Download all the built wheels from that
release. The easiest way to do that is with Ned Batchelder's
`download_appveyor.py script
<https://bitbucket.org/ned/coveragepy/src/tip/ci/download_appveyor.py>`_.
6. Meanwhile, spin up docker and from the root of the gevent checkout
run ``scripts/releases/make-manylinux``. This creates wheels in
``wheelhouse/``.
7. If on a mac, ``cd scripts/releases && ./geventreleases.sh``. This
creates wheels in ``/tmp/gevent/``.
8. Upload the Appveyor, manylinux, and mac wheels to pypi using
``twine``. Also be sure to upload the sdist!
9. Run ``postrelease``, let it bump the version and push the changes
to github.
#!/usr/bin/env python
# Copyright (C) 2011-2012 Denis Bilenko (http://denisbilenko.com)
# Copyright (C) 2015-2016 gevent contributors
######################################
######################################
######################################
### WARNING WARNING WARNING WARNING
##
## This script is unmaintained and no
## longer in use in this project due to
## bugs.
## See https://github.com/gevent/gevent/issues/1076
##
### WARNING WARNING WARNING WARNING
######################################
######################################
######################################
from __future__ import print_function
import sys
import os
import os.path
import re
import traceback
import datetime
import difflib
from hashlib import md5
from itertools import combinations, product
import subprocess
import multiprocessing
import tempfile
import shutil
from collections import OrderedDict
import threading
class Thread(threading.Thread):
    """A Thread that captures the return value of its target in ``value``.

    Works with both the Python 3 (``_target``/``_args``) and Python 2
    (name-mangled ``_Thread__target``/``_Thread__args``) internals of
    :class:`threading.Thread`.
    """
    # Holds the target's return value; None until run() has completed.
    value = None

    def run(self):
        func = getattr(self, '_target', None)  # Py3 spelling
        if func is None:
            # Python 2 keeps these under name-mangled attribute names.
            func = getattr(self, '_Thread__target')
            call_args = getattr(self, '_Thread__args')
        else:
            call_args = self._args
        self.value = func(*call_args)
# Version-portable exec helper.  "exec co in loc" is a SyntaxError on
# Python 3, and calling exec() inside a Python 2 function with nested
# scopes is also restricted, so the correct definition is itself exec'd
# as a string chosen per interpreter version.
do_exec = None
if sys.version_info >= (3, 0):
    exec("def do_exec(co, loc): exec(co, loc)\n")
else:
    exec("def do_exec(co, loc): exec co in loc\n")
# Cython executable to invoke; overridable through the CYTHON env var.
CYTHON = os.environ.get('CYTHON') or 'cython'
# CYTHONPP_DEBUG enables debug logging; the special value 'trace'
# additionally enables per-line tracing (see TRACE below).
DEBUG = os.environ.get('CYTHONPP_DEBUG', False)
TRACE = DEBUG == 'trace'
# When True, intermediate .deb debug files are written next to outputs.
WRITE_OUTPUT = False

if os.getenv('READTHEDOCS'):
    # Sometimes RTD fails to put our virtualenv bin directory
    # on the PATH, meaning we can't run cython. Fix that.
    new_path = os.environ['PATH'] + os.pathsep + os.path.dirname(sys.executable)
    os.environ['PATH'] = new_path

# Parameter name in macros must match this regex:
param_name_re = re.compile(r'^[a-zA-Z_]\w*$')

# First line of a definition of a new macro:
define_re = re.compile(r'^#define\s+([a-zA-Z_]\w*)(\((?:[^,)]+,)*[^,)]+\))?\s+(.*)$')

# cython header:
cython_header_re = re.compile(r'^/\* (generated by cython [^\s*]+)[^*]+\*/$', re.I)
#assert cython_header_re.match('/* Generated by Cython 0.21.1 */').group(1) == 'Generated by Cython 0.21.1'
#assert cython_header_re.match('/* Generated by Cython 0.19 on 55-555-555 */').group(1) == 'Generated by Cython 0.19'
class EmptyConfigurationError(TypeError):
    """Raised when a Configuration would contain no conditions at all."""
    pass
class Configuration(frozenset):
    """
    A set of CPP conditions that apply to a given sequence
    of lines. Sometimes referred to as a "tag".
    Configurations are iterated in sorted order for consistency
    across runs.
    """
    __slots__ = ('_sorted',)
    # Interning cache: sorted tuple of Conditions -> Configuration instance.
    _cache = {}

    def __new__(cls, iterable):
        # Canonicalize: dedupe via frozenset, then sort for stable identity.
        sorted_iterable = tuple(sorted(frozenset(iterable)))
        if not sorted_iterable:
            raise EmptyConfigurationError("Empty configurations not allowed")
        if sorted_iterable not in cls._cache:
            if not all(isinstance(x, Condition) for x in sorted_iterable):
                raise TypeError("Must be iterable of conditions")
            self = frozenset.__new__(cls, sorted_iterable)
            self._sorted = sorted_iterable
            cls._cache[sorted_iterable] = self
        return cls._cache[sorted_iterable]

    def union(self, other):
        # The union of two configurations is itself a Configuration.
        return Configuration(frozenset.union(self, other))

    def __add__(self, conditions):
        return self.union(conditions)

    def difference(self, other):
        try:
            return Configuration(frozenset.difference(self, other))
        except EmptyConfigurationError:
            # Re-raise with a message naming the operands.
            raise EmptyConfigurationError(
                "Couldn't subtract %r from %r" % (self, other))

    def __sub__(self, other):
        return self.difference(other)

    def __iter__(self):
        # Iterate in sorted order (not frozenset order) for reproducibility.
        return iter(self._sorted)

    def format_tag(self):
        # Render as a CPP expression, e.g. 'defined(A) && !defined(B)'.
        return ' && '.join([x.format_cond() for x in self])

    def __repr__(self):
        return "Configuration({" + ', '.join((repr(x) for x in self)) + '})'

    @property
    def all_directives(self):
        "All the directives in the conditions of this configuration"
        return set(x.directive for x in self)

    def is_impossible(self):
        """
        Return whether the configuration (a Configuration) contradicts itself.
        """
        # Returns True on contradiction; otherwise falls off the end and
        # returns None (falsy), which callers test with ``not``.
        conds = {}
        for cond_name, cond_setting in self:
            if cond_name in conds:
                if conds.get(cond_name) != cond_setting:
                    return True
            conds[cond_name] = cond_setting

    def is_condition_true(self, directive):
        # Normalize '#if X' / '#ifdef X' into the (parameter, True) form
        # used by Condition, then test membership.
        if directive.startswith('#if '):
            parameter = directive.split(' ', 1)[1]
        elif directive.startswith('#ifdef '):
            parameter = directive.split(' ', 1)[1]
            parameter = 'defined(%s)' % parameter
        else:
            raise AssertionError('Invalid directive: %r' % directive)
        cond = (parameter, True)
        return cond in self

    def attach_tags(self, text):
        # Split *text* into lines and tag each one with this configuration.
        result = [x for x in text.split('\n')]
        if result and not result[-1]:
            del result[-1]
        return [Str(x + '\n', self) for x in result]

    @classmethod
    def get_configurations(cls, filename):
        """
        Returns a set of Configuration objects representing
        the configurations seen in the file.
        """
        conditions = set()
        condition_stack = []  # Conditions currently open at this line
        linecount = 0
        match_condition = Condition.match_condition
        with open(filename) as f:
            for line in f:
                linecount += 1
                try:
                    m = match_condition(line)
                    if m is None:
                        # Ordinary line: record the currently-open conditions.
                        if condition_stack:  # added
                            conditions.add(cls(condition_stack))
                        continue
                    split = m.group(1).strip().split(' ', 1)
                    directive = split[0].strip()
                    if len(split) == 1:
                        parameter = None
                        assert directive in ('else', 'endif'), directive
                    else:
                        parameter = split[1].strip()
                        assert directive in ('if', 'ifdef'), directive
                    if directive == 'ifdef':
                        # Normalize '#ifdef X' to '#if defined(X)'.
                        directive = 'if'
                        parameter = 'defined(%s)' % parameter
                    if directive == 'if':
                        condition_stack.append(Condition(parameter, True))
                    elif directive == 'else':
                        if not condition_stack:
                            raise SyntaxError('Unexpected "#else"')
                        # Invert the most recent condition.
                        last_cond, true = condition_stack.pop()
                        assert true is True, true
                        condition_stack.append(Condition(last_cond, not true))
                    elif directive == 'endif':
                        if not condition_stack:
                            raise SyntaxError('Unexpected "#endif"')
                        condition_stack.pop()
                    else:
                        raise AssertionError('Internal error')
                except BaseException as ex:
                    log('%s:%s: %s', filename, linecount, ex)
                    if isinstance(ex, SyntaxError):
                        sys.exit(1)
                    else:
                        raise
        dbg("Found conditions %s", conditions)
        return conditions

    @classmethod
    def get_permutations_of_configurations(cls, items):
        """
        Returns a set of Configuration objects representing all the
        possible permutations of the given list of configuration
        objects. Impossible configurations are excluded.
        """
        def flattened(tuple_of_configurations):
            # product() produces a list of tuples. Each
            # item in the tuple is a different configuration object.
            set_of_configurations = set(tuple_of_configurations)
            sorted_set_of_configurations = sorted(set_of_configurations)
            conditions = []
            for configuration in sorted_set_of_configurations:
                for condition in configuration:
                    conditions.append(condition)
            return cls(conditions)
        flattened_configurations = (flattened(x) for x in product(items, repeat=len(items)))
        possible_configurations = set((x for x in flattened_configurations if not x.is_impossible()))
        return possible_configurations

    @classmethod
    def get_permutations_of_configurations_in_file(cls, filename):
        """
        Returns a sorted list of unique configurations possible in the given
        file.
        """
        return sorted(cls.get_permutations_of_configurations(cls.get_configurations(filename)))

    @classmethod
    def get_complete_configurations(cls, filename):
        """
        Return a sorted list of the set of unique configurations possible
        in the given file; each configuration will have the all the conditions
        it specifies, plus the implicit conditions that it does not specify.
        """
        configurations = cls.get_permutations_of_configurations_in_file(filename)
        all_cond_names = set()
        for config in configurations:
            all_cond_names = all_cond_names.union(config.all_directives)
        result = set()
        for configuration in configurations:
            # Add every directive the configuration doesn't mention as False.
            cond_names_in_configuration = configuration.all_directives
            cond_names_not_in_configuration = all_cond_names - cond_names_in_configuration
            for missing_cond_name in cond_names_not_in_configuration:
                configuration = configuration + (Condition(missing_cond_name, False), )
            result.add(cls(sorted(configuration)))
        # XXX: Previously, this produced eight configurations for gevent/corecext.ppyx
        # (containing all the possible permutations).
        # But two of them produced identical results and were hashed as such
        # by run_cython_on_files. We're now producing just the 6 results that
        # are distinct in that case. I'm not exactly sure why
        assert all(isinstance(x, Configuration) for x in result)
        return sorted(result)
class Condition(tuple):
    """
    A single CPP directive.
    Two-tuple: (name, True|False)
    """
    # Matches a conditional preprocessor directive line.
    condition_re = re.compile(r'^#(ifdef\s+.+|if\s+.+|else\s*|endif\s*)$')
    # Interning cache so equal conditions share one object.
    _cache = {}
    __slots__ = ()

    def __new__(cls, *args):
        if len(args) == 2:
            # Literal constructor: Condition(name, value)
            key = args
        elif len(args) == 1:
            # Copy constructor: Condition((name, value))
            key = args[0]
        else:
            raise TypeError("wrong argument number", args)
        cached = cls._cache.get(key)
        if cached is None:
            if len(key) != 2:
                raise TypeError("Must be len 2", key)
            if not isinstance(key[0], str) or not isinstance(key[1], bool):
                raise TypeError("Must be (str, bool)")
            cached = tuple.__new__(cls, key)
            cls._cache[key] = cached
        return cached

    def __repr__(self):
        return "Condition" + tuple.__repr__(self)

    @property
    def directive(self):
        # The condition text, e.g. 'defined(FOO)'.
        return self[0]

    @property
    def value(self):
        # Whether the directive is asserted (True) or negated (False).
        return self[1]

    def format_cond(self):
        # Render for a CPP expression; '!' marks a negated condition.
        return self.directive if self.value else '!' + self.directive

    def inverted(self):
        # The same directive with the truth value flipped.
        return Condition(self.directive, not self.value)

    @classmethod
    def match_condition(cls, line):
        stripped = line.strip()
        # A trailing ':' means Cython code (e.g. 'if x:'), not a directive.
        if stripped.endswith(':'):
            return None
        return cls.condition_re.match(stripped)
class ConfigurationGroups(tuple):
    """
    A sequence of Configurations that apply to the given line.
    These are maintained in sorted order.
    """
    # Interning cache: sorted tuple of Configurations -> instance.
    _cache = {}

    def __new__(cls, tags):
        sorted_tags = tuple(sorted(tags))
        if sorted_tags not in cls._cache:
            if not all(isinstance(x, Configuration) for x in tags):
                raise TypeError("Must be a Configuration", tags)
            self = tuple.__new__(cls, sorted(tags))
            # Memo flag for simplify_tags(); set True once fully simplified.
            self._simplified = False
            cls._cache[sorted_tags] = self
        return cls._cache[sorted_tags]

    def __repr__(self):
        return "ConfigurationGroups" + tuple.__repr__(self)

    def __add__(self, other):
        l = list(self)
        l.extend(other)
        return ConfigurationGroups(l)

    def exact_reverse(self, tags2):
        # True only when self and tags2 are each a single one-condition
        # Configuration over the same directive with opposite values;
        # produce_preprocessor() uses this to emit '#else'.
        if not self:
            return
        if not tags2:
            return
        if not isinstance(self, tuple):
            raise TypeError(repr(self))
        if not isinstance(tags2, tuple):
            raise TypeError(repr(tags2))
        if len(self) == 1 and len(tags2) == 1:
            tag1 = self[0]
            tag2 = tags2[0]
            assert isinstance(tag1, Configuration), tag1
            assert isinstance(tag2, Configuration), tag2
            if len(tag1) == 1 and len(tag2) == 1:
                tag1 = list(tag1)[0]
                tag2 = list(tag2)[0]
                if tag1[0] == tag2[0]:
                    # Same directive: opposite iff values sort to [False, True].
                    return sorted([tag1[1], tag2[1]]) == [False, True]

    def format_tags(self):
        # Render as a CPP OR-expression of the member configurations.
        return ' || '.join('(%s)' % x.format_tag() for x in sorted(self))

    def simplify_tags(self):
        """
        >>> simplify_tags([set([('defined(world)', True), ('defined(hello)', True)]),
        ... set([('defined(world)', False), ('defined(hello)', True)])])
        [set([('defined(hello)', True)])]
        >>> simplify_tags([set([('defined(LIBEV_EMBED)', True), ('defined(_WIN32)', True)]), set([('defined(LIBEV_EMBED)', True),
        ... ('defined(_WIN32)', False)]), set([('defined(_WIN32)', False), ('defined(LIBEV_EMBED)', False)]),
        ... set([('defined(LIBEV_EMBED)', False), ('defined(_WIN32)', True)])])
        []
        """
        if self._simplified:
            return self
        if (len(self) == 2
                and len(self[0]) == len(self[1]) == 1
                and list(self[0])[0] == list(self[1])[0].inverted()):
            # This trivially simplifies to the empty group
            # Its defined(foo, True) || defined(foo, False)
            return ConfigurationGroups(()).simplify_tags()
        for tag1, tag2 in sorted(combinations(self, 2)):
            if tag1 == tag2:
                # Drop an exact duplicate and re-simplify.
                tags = list(self)
                tags.remove(tag1)
                return ConfigurationGroups(tags).simplify_tags()
            for condition in tag1:
                inverted_condition = condition.inverted()
                # NOTE(review): this compares a Condition against a whole
                # Configuration; it looks like it can never be equal --
                # confirm the original intent before relying on it.
                if inverted_condition == tag2:
                    continue
                if inverted_condition in tag2:
                    # (A && X) || (A && !X) simplifies to A.
                    tag1_copy = tag1 - {inverted_condition}
                    tag2_copy = tag2 - {inverted_condition}
                    assert isinstance(tag1_copy, Configuration), tag1_copy
                    assert isinstance(tag2_copy, Configuration), tag2_copy
                    if tag1_copy == tag2_copy:
                        tags = list(self)
                        tags.remove(tag1)
                        tags.remove(tag2)
                        tags.append(tag1_copy)
                        return ConfigurationGroups(tags).simplify_tags()
        self._simplified = True
        return self
# Sentinel embedded where a newline inside a collapsed C comment used to be;
# replaced with a real '\n' again by generate_merged().
newline_token = ' <cythonpp.py: REPLACE WITH NEWLINE!> '
def _run_cython_on_file(configuration, pyx_filename,
                        py_banner, banner,
                        output_filename,
                        counter, lines,
                        cache=None,
                        module_name=None):
    """Run Cython over one preprocessed variant of the source.

    Returns (tagged_output_lines, configuration, sourcehash); the hash of
    the preprocessed input lets callers detect identical variants.
    """
    value = ''.join(lines)
    sourcehash = md5(value.encode("utf-8")).hexdigest()
    comment = configuration.format_tag() + " hash:" + str(sourcehash)
    if os.path.isabs(output_filename):
        raise ValueError("output cannot be absolute")
    # We can't change the actual name of the pyx file because
    # cython generates function names based in that string.
    # XXX: Note that this causes cython to generate
    # a "corecext" name instead of "gevent.corecext"
    tempdir = tempfile.mkdtemp()
    #unique_pyx_filename = pyx_filename
    #unique_output_filename = output_filename
    unique_pyx_filename = os.path.join(tempdir, module_name or pyx_filename)
    unique_output_filename = os.path.join(tempdir, output_filename)
    dirname = os.path.dirname(unique_pyx_filename)  # output must be in same dir
    dbg("Output filename %s", unique_output_filename)
    if dirname and not os.path.exists(dirname):
        dbg("Making dir %s", dirname)
        os.makedirs(dirname)
    try:
        atomic_write(unique_pyx_filename, py_banner + value)
        if WRITE_OUTPUT:
            atomic_write(unique_pyx_filename + '.deb', '# %s (%s)\n%s' % (banner, comment, value))
        output = run_cython(unique_pyx_filename, sourcehash, unique_output_filename, banner, comment,
                            cache)
        if WRITE_OUTPUT:
            atomic_write(unique_output_filename + '.deb', output)
    finally:
        # Keep the tempdir around when debugging for post-mortem inspection.
        if not DEBUG:
            shutil.rmtree(tempdir, True)
    return configuration.attach_tags(output), configuration, sourcehash
def _run_cython_on_files(pyx_filename, py_banner, banner, output_filename, preprocessed,
                         module_name=None):
    """Run Cython over each preprocessed variant, one thread per variant.

    Variants that hash to the same source are combined by merging their
    tags. Returns the ordered list of tagged outputs, or None if any
    thread failed.
    """
    counter = 0
    threads = []
    cache = {}  # sourcehash -> postprocessed output, shared across threads
    for configuration, lines in sorted(preprocessed.items()):
        counter += 1
        threads.append(Thread(target=_run_cython_on_file,
                              args=(configuration, pyx_filename,
                                    py_banner, banner, output_filename,
                                    counter, lines,
                                    cache, module_name)))
        threads[-1].start()
    for t in threads:
        t.join()

    same_results = {}  # {sourcehash: tagged_str}
    for t in threads:
        if not t.value:
            # Thread.value stays None when the target raised.
            log("Thread %s failed.", t)
            return
        sourcehash = t.value[2]
        tagged_output = t.value[0]
        if sourcehash not in same_results:
            same_results[sourcehash] = tagged_output
        else:
            # Nice, something to combine with tags
            other_tagged_output = same_results[sourcehash]
            assert len(tagged_output) == len(other_tagged_output)
            combined_lines = []
            for line_a, line_b in zip(tagged_output, other_tagged_output):
                combined_tags = line_a.tags + line_b.tags
                combined_lines.append(Str(line_a, combined_tags.simplify_tags()))
            same_results[sourcehash] = combined_lines

    # Order them as they were processed for repeatability
    # NOTE(review): membership is tested on t.value[0] (the raw output) but
    # the appended item is same_results[t.value[2]] (possibly combined) --
    # confirm this cannot append the same combined result twice.
    ordered_results = []
    for t in threads:
        if t.value[0] not in ordered_results:
            ordered_results.append(same_results[t.value[2]])
    return ordered_results
def process_filename(filename, output_filename=None, module_name=None):
    """Process the .ppyx file with preprocessor and compile it with cython.
    The algorithm is as following:
    1) Identify all possible preprocessor conditions in *filename*.
    2) Run preprocess_filename(*filename*) for each of these conditions.
    3) Process the output of preprocessor with Cython (as many times as
    there are different sources generated for different preprocessor
    definitions.
    4) Merge the output of different Cython runs using preprocessor conditions
    identified in (1).
    """
    if output_filename is None:
        output_filename = filename.rsplit('.', 1)[0] + '.c'
    pyx_filename = filename.rsplit('.', 1)[0] + '.pyx'
    assert pyx_filename != filename
    timestamp = str(datetime.datetime.now().replace(microsecond=0))
    banner = 'Generated by cythonpp.py on %s' % timestamp
    py_banner = '# %s\n' % banner
    # Map each complete configuration to its preprocessed line list.
    preprocessed = {}
    for configuration in Configuration.get_complete_configurations(filename):
        dbg("Processing %s", configuration)
        preprocessed[configuration] = preprocess_filename(filename, configuration)
    # The None key holds the un-conditioned reference .pyx (macros expanded,
    # conditionals left alone).
    preprocessed[None] = preprocess_filename(filename, None)
    # Pad with blank lines so every variant keeps matching line numbers.
    preprocessed = expand_to_match(preprocessed.items())
    reference_pyx = preprocessed.pop(None)
    sources = _run_cython_on_files(pyx_filename, py_banner, banner, output_filename,
                                   preprocessed, module_name)
    if sources is None:
        log("At least one thread failed to run")
        sys.exit(1)
    log('Generating %s ', output_filename)
    result = generate_merged(sources)
    # Hash skips the first 4 lines (banner/header) so identical code from
    # different runs hashes the same.
    result_hash = md5(''.join(result.split('\n')[4:]).encode("utf-8")).hexdigest()
    atomic_write(output_filename, result)
    log('%s bytes of hash %s\n', len(result), result_hash)
    if filename != pyx_filename:
        log('Saving %s', pyx_filename)
        atomic_write(pyx_filename, py_banner + ''.join(reference_pyx))
def generate_merged(sources):
    """Merge the per-configuration Cython outputs into one C source string,
    restoring real newlines for the collapsed-comment sentinel."""
    return ''.join(
        line.replace(newline_token, '\n')
        for line in produce_preprocessor(merge(sources))
    )
def preprocess_filename(filename, config):
    """Process given .ppyx file with preprocessor.
    This does the following
    1) Resolves "#if"s and "#ifdef"s using config
    2) Expands macro definitions (#define)
    """
    linecount = 0
    current_name = None            # macro whose multi-line body is being read
    definitions = OrderedDict()    # name -> {'lines': [...], 'params': [...]}
    result = []
    including_section = []         # stack of booleans from nested #if/#ifdef
    with open(filename) as f:
        for line in f:
            linecount += 1
            rstripped = line.rstrip()
            stripped = rstripped.lstrip()
            try:
                if current_name is not None:
                    # Continuation line of a multi-line #define.
                    name = current_name
                    value = rstripped
                    if value.endswith('\\'):
                        value = value[:-1].rstrip()
                    else:
                        # No trailing backslash: the macro body ends here.
                        current_name = None
                    definitions[name]['lines'].append(value)
                else:
                    # Only look for new #defines in included sections.
                    if not including_section or including_section[-1]:
                        m = define_re.match(stripped)
                    else:
                        m = None
                    if m is not None:
                        # Start of a #define, possibly multi-line and
                        # possibly with a parameter list.
                        name, params, value = m.groups()
                        value = value.strip()
                        if value.endswith('\\'):
                            value = value[:-1].rstrip()
                            current_name = name
                        definitions[name] = {'lines': [value]}
                        if params is None:
                            trace('Adding definition for %r', name)
                        else:
                            definitions[name]['params'] = parse_parameter_names(params)
                            trace('Adding definition for %r: %s', name, definitions[name]['params'])
                    else:
                        m = Condition.match_condition(stripped)
                        if m is not None and config is not None:
                            # Resolve the conditional directive against *config*.
                            if stripped == '#else':
                                if not including_section:
                                    raise SyntaxError('unexpected "#else"')
                                if including_section[-1]:
                                    including_section.pop()
                                    including_section.append(False)
                                else:
                                    including_section.pop()
                                    including_section.append(True)
                            elif stripped == '#endif':
                                if not including_section:
                                    raise SyntaxError('unexpected "#endif"')
                                including_section.pop()
                            else:
                                including_section.append(config.is_condition_true(stripped))
                        else:
                            if including_section and not including_section[-1]:
                                pass  # skip this line because last "#if" was false
                            else:
                                if stripped.startswith('#'):
                                    # leave comments as is
                                    result.append(Str_sourceline(line, linecount - 1))
                                else:
                                    # Expand macros; a single source line may
                                    # expand to several output lines.
                                    lines = expand_definitions(line, definitions).split('\n')
                                    if lines and not lines[-1]:
                                        del lines[-1]
                                    lines = [x + '\n' for x in lines]
                                    lines = [Str_sourceline(x, linecount - 1) for x in lines]
                                    result.extend(lines)
            except BaseException as ex:
                log('%s:%s: %s', filename, linecount, ex)
                if isinstance(ex, SyntaxError):
                    sys.exit(1)
                else:
                    raise
    return result
def merge(sources):
    r"""Merge different sources into a single one. Each line of the result
    is a subclass of string that maintains the information for each configuration
    it should appear in the result.
    >>> src1 = attach_tags('hello\nworld\n', set([('defined(hello)', True), ('defined(world)', True)]))
    >>> src2 = attach_tags('goodbye\nworld\n', set([('defined(hello)', False), ('defined(world)', True)]))
    >>> src3 = attach_tags('hello\neveryone\n', set([('defined(hello)', True), ('defined(world)', False)]))
    >>> src4 = attach_tags('goodbye\neveryone\n', set([('defined(hello)', False), ('defined(world)', False)]))
    >>> from pprint import pprint
    >>> pprint(merge([src1, src2, src3, src4]))
    [Str('hello\n', [set([('defined(hello)', True)])]),
     Str('goodbye\n', [set([('defined(hello)', False)])]),
     Str('world\n', [set([('defined(world)', True)])]),
     Str('everyone\n', [set([('defined(world)', False)])])]
    """
    sources = list(sources)  # own copy
    dbg("Merging %s", len(sources))
    if len(sources) <= 1:
        # Base case of the recursion: simplify tags on the single survivor.
        return [Str(str(x), x.tags.simplify_tags()) for x in sources[0]]
    if not DEBUG:
        pool = multiprocessing.Pool()
    else:
        # Debugging: run merges serially in-process so tracebacks are usable.
        class SerialPool(object):
            def imap(self, func, arg_list):
                return [func(*args) for args in arg_list]

            def apply(self, func, args):
                return func(*args)
        pool = SerialPool()
    # Pair the sources off and merge each pair (in parallel unless DEBUG).
    groups = []
    while len(sources) >= 2:
        one, two = sources.pop(), sources.pop()
        groups.append((one, two))
    dbg("Merge groups %s", len(groups))
    # len sources == 0 or 1
    for merged in pool.imap(_merge, groups):
        dbg("Completed a merge in %s", os.getpid())
        sources.append(merged)
    # len sources == 1 or 2
    if len(sources) == 2:
        one, two = sources.pop(), sources.pop()
        sources.append(pool.apply(_merge, (one, two)))
    # len sources == 1
    # len sources should now be 1
    dbg("Now merging %s", len(sources))
    # Recurse: the next call hits the len <= 1 base case.
    return merge(sources)
def _merge(*args):
    """Merge two tagged sources into a list.

    Accepts either two positional arguments or a single (a, b) tuple,
    so it can be used both directly and through pool.imap().
    """
    pair = args[0] if isinstance(args[0], tuple) else args
    first, second = pair
    return list(_imerge(first, second))
def _imerge(a, b):
    """Yield the diff-merge of the tagged line lists *a* and *b*.

    Lines common to both are emitted once with the union of both sides'
    tags; lines unique to one side keep their own tags.
    """
    # caching the tags speeds up serialization and future merges
    tag_cache = {}
    for tag, i1, i2, j1, j2 in difflib.SequenceMatcher(None, a, b).get_opcodes():
        if tag == 'equal':
            for line_a, line_b in zip(a[i1:i2], b[j1:j2]):
                # tags is a tuple of frozensets
                line_a_tags = line_a.tags  #getattr(line_a, 'tags', ())
                line_b_tags = line_b.tags  #getattr(line_b, 'tags', ())
                key = (line_a_tags, line_b_tags)
                tags = tag_cache.setdefault(key, line_a_tags + line_b_tags)
                assert isinstance(tags, ConfigurationGroups)
                yield Str(line_a, tags)
        else:
            # Differing region: emit both sides unchanged, a's lines first.
            for line in a[i1:i2]:
                yield line
            for line in b[j1:j2]:
                yield line
def expand_to_match(items):
    """Insert empty lines so that all sources has matching line numbers for the same code"""
    cfg2newlines = {}  # maps configuration -> list
    for configuration, lines in items:
        cfg2newlines[configuration] = []
    maxguard = 2 ** 30  # sentinel larger than any real sourceline
    while True:
        # Find the smallest source line still pending in any variant.
        minimalsourceline = maxguard
        for configuration, lines in items:
            if lines:
                minimalsourceline = min(minimalsourceline, lines[0].sourceline)
        if minimalsourceline == maxguard:
            # All variants exhausted.
            break
        # Move every line at (or before) that source line to the output;
        # note this consumes the input lists in place.
        for configuration, lines in items:
            if lines and lines[0].sourceline <= minimalsourceline:
                cfg2newlines[configuration].append(lines[0])
                del lines[0]
        # Pad shorter variants with blank lines so all stay the same length.
        number_of_lines = max(len(x) for x in cfg2newlines.values())
        for newlines in cfg2newlines.values():
            add = (number_of_lines - len(newlines))
            newlines.extend(['\n'] * add)
    return cfg2newlines
def produce_preprocessor(iterable):
    """Yield lines wrapped in #if/#else/#endif guards derived from their tags."""
    if TRACE:
        # Trace mode: log every emitted line with a running line number.
        current_line = [0]

        def wrap(line):
            current_line[0] += 1
            dbg('%5d: %s', current_line[0], repr(str(line))[1:-1])
            return line
    else:
        def wrap(line):
            return line

    state = None  # the tag group currently open, if any
    for line in iterable:
        key = line.tags  # or None
        if key == state:
            # Same guard as the previous line: no new directive needed.
            yield wrap(line)
        else:
            if key.exact_reverse(state):
                # Exactly the inverted condition: emit '#else'.
                yield wrap('#else /* %s */\n' % state.format_tags())
            else:
                # Close the open guard (if any) and open the new one.
                if state:
                    yield wrap('#endif /* %s */\n' % state.format_tags())
                if key:
                    yield wrap('#if %s\n' % key.format_tags())
            yield wrap(line)
            state = key
    if state:
        # Close the final open guard.
        yield wrap('#endif /* %s */\n' % state.format_tags())
class Str(str):
    """This is a string subclass that has a set of tags attached to it.
    Used for merging the outputs.
    """

    def __new__(cls, string, tags):
        if not isinstance(string, str):
            raise TypeError('string must be str: %s' % (type(string), ))
        if not isinstance(tags, Configuration) and not isinstance(tags, ConfigurationGroups):
            raise TypeError("Must be tags or tag groups: %r" % (tags,))
        if isinstance(tags, Configuration):
            # Normalize a single Configuration into a one-element group.
            tags = ConfigurationGroups((tags,))
        self = str.__new__(cls, string)
        self.tags = tags
        return self

    def __getnewargs__(self):
        # Preserve the tags across copy/pickle.
        return str(self), self.tags

    def __repr__(self):
        return '%s(%s, %r)' % (self.__class__.__name__, str.__repr__(self), self.tags)

    def __add__(self, other):
        if not isinstance(other, str):
            raise TypeError
        return self.__class__(str.__add__(self, other), self.tags)

    def __radd__(self, other):
        if not isinstance(other, str):
            raise TypeError
        return self.__class__(str.__add__(other, self), self.tags)

    # Generate tag-preserving wrappers for the common str methods below;
    # do_exec is used so the same source works on Python 2 and 3.
    methods = ['__getslice__', '__getitem__', '__mul__', '__rmod__', '__rmul__',
               'join', 'replace', 'upper', 'lower']
    for method in methods:
        do_exec('''def %s(self, *args):
    return self.__class__(str.%s(self, *args), self.tags)''' % (method, method), locals())
def parse_parameter_names(x):
    """Split a macro parameter list like '(a, b)' into ['a', 'b'].

    Raises SyntaxError if any name is not a valid identifier.
    """
    assert x.startswith('(') and x.endswith(')'), repr(x)
    names = []
    for raw in x[1:-1].split(','):
        name = raw.strip()
        if not param_name_re.match(name):
            raise SyntaxError('Invalid parameter name: %r' % name)
        names.append(name)
    return names
def parse_parameter_values(x):
    """Split a macro argument list like '(1, b + 2)' into ['1', 'b + 2'].

    Unlike parse_parameter_names, the pieces may be arbitrary expressions,
    so no identifier validation happens here.
    """
    assert x.startswith('(') and x.endswith(')'), repr(x)
    return [piece.strip() for piece in x[1:-1].split(',')]
def expand_definitions(code, definitions):
    """Repeatedly substitute macro invocations in *code* using *definitions*."""
    if not definitions:
        return code
    # Longest names first, so e.g. FOO_BAR is matched before FOO.
    keys = list(definitions.keys())
    keys.sort(key=lambda x: (-len(x), x))
    keys = '|'.join(keys)
    # This regex defines a macro invocation
    # NOTE(review): the trailing [^w] looks like a typo for [^\w] (compare
    # the leading [^\w]); left untouched in this unmaintained script.
    re_macro = re.compile(r'(^|##|[^\w])(%s)(\([^)]+\)|$|##|[^w])' % keys)

    def repl(m):
        # Replace one macro invocation, recursively expanding arguments.
        token = m.group(2)
        definition = definitions[token]
        params = definition.get('params', [])
        if params:
            arguments = m.group(3)
            if arguments.startswith('(') and arguments.endswith(')'):
                arguments = parse_parameter_values(arguments)
            else:
                arguments = None
            if arguments and len(params) == len(arguments):
                # Bind each argument as a local parameterless macro.
                local_definitions = {}
                trace('Macro %r params=%r arguments=%r source=%r', token, params, arguments, m.groups())
                for key, value in zip(params, arguments):
                    trace('Adding argument %r=%r', key, value)
                    local_definitions[key] = {'lines': [value]}
                result = expand_definitions('\n'.join(definition['lines']), local_definitions)
            else:
                msg = 'Invalid number of arguments for macro %s: expected %s, got %s'
                msg = msg % (token, len(params), len(arguments or []))
                raise SyntaxError(msg)
        else:
            result = '\n'.join(definition['lines'])
        # '##' is the token-pasting operator: drop it from the output.
        if m.group(3) != '##':
            result += m.group(3)
        if m.group(1) != '##':
            result = m.group(1) + result
        trace('Replace %r with %r', m.group(0), result)
        return result

    # Substitute one invocation at a time until a fixed point is reached;
    # the iteration cap guards against runaway expansion.
    for _ in range(20000):
        newcode, count = re_macro.subn(repl, code, count=1)
        if code == newcode:
            if count > 0:
                raise SyntaxError('Infinite recursion')
            return newcode
        code = newcode
    raise SyntaxError('Too many substitutions or internal error.')
class Str_sourceline(str):
    """A str that remembers the zero-based source line it came from."""

    def __new__(cls, source, sourceline):
        instance = str.__new__(cls, source)
        instance.sourceline = sourceline
        return instance

    def __getnewargs__(self):
        # Preserve sourceline across copy/pickle.
        return str(self), self.sourceline
def atomic_write(filename, data):
    """Write *data* to *filename* as atomically as practical.

    The data is first written (and fsync'd) to a temporary file in the
    same directory, which is then renamed over *filename*.  Note that on
    Windows os.rename() fails when the target exists, hence the unlink
    first -- which leaves a small window where the file is missing.
    """
    dirname = os.path.dirname(os.path.abspath(filename))
    tmpfd, tmpname = tempfile.mkstemp(dir=dirname, text=True)
    with os.fdopen(tmpfd, 'w') as f:
        f.write(data)
        f.flush()
        os.fsync(f.fileno())
    if os.path.exists(filename):
        os.unlink(filename)
    dbg("Renaming %s to %s", tmpname, filename)
    try:
        os.rename(tmpname, filename)
    except:
        # Fixed: the message was missing the closing quote after the
        # second name ("'%s" -> "'%s'").
        log("Failed to rename '%s' to '%s'", tmpname, filename)
        raise
    dbg('Wrote %s bytes to %s', len(data), filename)
def run_cython(filename, sourcehash, output_filename, banner, comment, cache=None):
    """Compile *filename* with Cython, reusing a cached result for identical
    sources (keyed by *sourcehash*) when *cache* is provided."""
    dbg("Cython output to %s hash %s", output_filename, sourcehash)
    result = cache.get(sourcehash) if cache is not None else None
    # Use an array for the argument so that filename arguments are properly
    # quoted according to local convention
    command = [CYTHON, '-o', output_filename,
               '-I', os.path.join('src', 'gevent', 'libev'),
               '-I', os.path.join('src', 'gevent'),  # python.pxd, shared with c-ares
               filename]
    if result is not None:
        log('Reusing %s # %s', command, comment)
        return result
    system(command, comment)
    result = postprocess_cython_output(output_filename, banner)
    if cache is not None:
        cache[sourcehash] = result
    return result
def system(command, comment):
    """Run *command* (an argv list) via subprocess, logging before/after.

    On failure, dumps PATH and the interpreter's bin directory contents to
    help diagnose a missing cython executable, then re-raises.
    """
    command_str = ' '.join(command)
    log('Running %s # %s', command_str, comment)
    try:
        subprocess.check_call(command)
        dbg('\tDone running %s # %s', command_str, comment)
    except (subprocess.CalledProcessError, OSError):
        # Python 2 can raise OSError: No such file or directory
        # debugging code
        log("Path: %s", os.getenv("PATH"))
        bin_dir = os.path.dirname(sys.executable)
        bin_files = os.listdir(bin_dir)
        bin_files.sort()
        log("Bin: %s files: %s", bin_dir, ' '.join(bin_files))
        raise
def postprocess_cython_output(filename, banner):
    # this does a few things:
    # 1) converts multiline C-style (/**/) comments with a single line comment by
    #    replacing \n with newline_token
    # 2) adds our header
    # 3) remove timestamp in cython's header so that different timestamps do not
    #    confuse merger
    result = ['/* %s */\n' % (banner)]
    with open(filename) as finput:
        firstline = finput.readline()
        m = cython_header_re.match(firstline.strip())
        if m:
            # Keep only the "Generated by Cython X.Y" part; drops the
            # timestamp so identical code diffs as identical.
            result.append('/* %s */' % m.group(1))
        else:
            result.append(firstline)
        in_comment = False  # inside a multi-line /* ... */ comment?
        for line in finput:
            if line.endswith('\n'):
                # Normalize trailing whitespace before the newline.
                line = line[:-1].rstrip() + '\n'
            if in_comment:
                if '*/' in line:
                    # Comment closes on this line; keep its real newline.
                    in_comment = False
                    result.append(line)
                else:
                    # Still inside: collapse the newline into the sentinel.
                    result.append(line.replace('\n', newline_token))
            else:
                if line.lstrip().startswith('/* ') and '*/' not in line:
                    # Opening line of a multi-line comment.
                    line = line.lstrip()  # cython adds space before /* for some reason
                    line = line.replace('\n', newline_token)
                    result.append(line)
                    in_comment = True
                else:
                    result.append(line)
    return ''.join(result)
def log(message, *args):
    """Print ``message % args`` to stderr; extremely defensive on failure.

    If the %-formatting fails (mismatched args), prints the call stack with
    a caret under the offending call, then falls back to a repr of both.
    """
    try:
        string = message % args
    except Exception:
        try:
            # Point at the log call site with a row of carets sized to the
            # last token of the offending source line (clamped to 5..80).
            prefix = 'Traceback (most recent call last):\n'
            lines = traceback.format_stack()[:-1]
            error_lines = traceback.format_exc().replace(prefix, '')
            last_length = len(lines[-1].strip().rsplit(' ', 1)[-1])
            last_length = min(80, last_length)
            last_length = max(5, last_length)
            msg = '%s%s %s\n%s' % (prefix, ''.join(lines), '^' * last_length, error_lines)
            sys.stderr.write(msg)
        except Exception:
            traceback.print_exc()
        try:
            message = '%r %% %r\n\n' % (message, args)
        except Exception:
            pass
        try:
            sys.stderr.write(message)
        except Exception:
            traceback.print_exc()
    else:
        print(string, file=sys.stderr)
# dbg() logs only when DEBUG is set; trace() only when TRACE is.
# When disabled they are no-ops that swallow all arguments.
if DEBUG:
    dbg = log
else:
    def dbg(*_):
        return
if TRACE:
    trace = log
else:
    def trace(*_):
        return
def main():
    """Command-line entry point: preprocess and cythonize a .ppyx file.

    Besides the normal run, supports listing the discovered conditions
    (--list-cond / --list) and a preprocess-only mode (--ignore-cond).
    """
    import argparse
    parser = argparse.ArgumentParser()
    parser.add_argument('--debug', action='store_true')
    parser.add_argument('--list', action='store_true', help='Show the list of different conditions')
    parser.add_argument('--list-cond', action='store_true')
    parser.add_argument('--ignore-cond', action='store_true', help='Ignore conditional directives (only expand definitions)')
    parser.add_argument('--write-intermediate', action='store_true', help='Save intermediate files produced by preprocessor and Cython')
    parser.add_argument('-o', '--output-file', help='Specify name of generated C file')
    # TODO: Derive the module name automatically from the input filename relative to the base
    # dir.
    parser.add_argument('--module-name', help="specify name of .pyx module")
    parser.add_argument("input")
    options = parser.parse_args()
    filename = options.input

    if options.debug:
        global DEBUG
        DEBUG = True

    if options.write_intermediate:
        global WRITE_OUTPUT
        WRITE_OUTPUT = True

    run = True
    if options.list_cond:
        run = False
        for x in Configuration.get_configurations(filename):
            print("* ", x)

    if options.list:
        run = False
        for x in Configuration.get_complete_configurations(filename):
            print("* ", x)

    if options.ignore_cond:
        run = False

        class FakeConfig(object):
            # Every condition evaluates false: only #define expansion runs.
            def is_condition_true(*args):
                return False

        # BUG FIX: preprocess_filename() returns a *list* of lines, but
        # sys.stdout.write() requires a single string -- join them first.
        sys.stdout.write(''.join(preprocess_filename(filename, FakeConfig())))

    if run:
        process_filename(filename, options.output_file, options.module_name)
#!/bin/bash
# Build a Debian package of gevent with fpm from a previously built sdist.
# Expects dist/gevent-1.0.tar.gz to exist in the current checkout; the
# resulting .deb ends up in ./build/.
set -e
CWD=`pwd`
# Start from a clean scratch directory.
rm -fr /tmp/build_gevent_deb
set -x
mkdir /tmp/build_gevent_deb
#util/makedist.py --dest /tmp/build_gevent_deb/gevent.tar.gz --version dev
cd /tmp/build_gevent_deb
# NOTE(review): the sdist version is hard-coded here -- update when releasing.
tar -xf $CWD/dist/gevent-1.0.tar.gz
fpm --no-python-dependencies -s python -t deb gevent*/setup.py
mkdir -p $CWD/build
mv *.deb $CWD/build/
#!/usr/bin/python
# Copyright (C) 2012 Denis Bilenko (http://denisbilenko.com)
"""
Create a source distribution of gevent.
Does the following:
- Clones the repo into a temporary location.
- Run set_version.py that will update gevent/__init__.py.
- Run 'python setup.py sdist'.
"""
from __future__ import print_function
import sys
import os
import glob
import argparse
from os.path import exists, join, abspath, basename
from pipes import quote
TMPDIR = '/tmp/gevent-make-dist'
def system(cmd, noisy=True):
    """Run *cmd* through the shell; terminate the script if it fails.

    When *noisy* is true the command is echoed to stdout first.
    """
    if noisy:
        print(cmd)
    status = os.system(cmd)
    if status:
        sys.exit('%r failed with %s' % (cmd, status))
def makedist(*args, **kwargs):
    """Build the sdist, always restoring the original working directory.

    Thin wrapper over ``_makedist`` (which chdirs around freely).
    """
    saved_cwd = os.getcwd()
    try:
        return _makedist(*args, **kwargs)
    finally:
        os.chdir(saved_cwd)
def _makedist(version=None, dest=None):
    """Build a gevent sdist from a clean clone and copy it to *dest*.

    Clones the current checkout into TMPDIR, stamps *version* into
    gevent/__init__.py via util/set_version.py, runs ``setup.py sdist``
    and copies the single resulting tarball to *dest* (default:
    ``<checkout>/dist/``).  Returns the tarball's path inside the clone.

    NOTE(review): must be invoked from the repository root; relies on the
    ``makedist`` wrapper to restore the working directory afterwards.
    """
    assert exists('gevent/__init__.py'), 'Where am I?'
    basedir = abspath(os.getcwd())
    version = version or 'dev'
    set_version_command = 'util/set_version.py --version %s ./gevent/__init__.py' % version
    # Work inside a throwaway clone so the real checkout is never modified.
    os.chdir('/tmp')
    system('rm -fr ' + TMPDIR)
    os.mkdir(TMPDIR)
    os.chdir(TMPDIR)
    system('git clone %s gevent' % basedir)
    directory = os.listdir('.')
    assert len(directory) == 1, directory
    os.chdir(directory[0])
    system('git branch')
    system(set_version_command)
    # Show the version stamp that was just applied.
    system('git diff', noisy=False)
    system('python setup.py -q sdist')
    # Exactly one tarball should have been produced.
    dist_filename = glob.glob('dist/gevent-*.tar.gz')
    assert len(dist_filename) == 1, dist_filename
    dist_path = abspath(dist_filename[0])
    dist_filename = basename(dist_path)
    if dest:
        if os.path.isdir(dest):
            dest = join(dest, dist_filename)
    else:
        # Default destination: dist/ inside the original checkout.
        if not exists(join(basedir, 'dist')):
            os.mkdir(join(basedir, 'dist'))
        dest = join(basedir, 'dist', dist_filename)
    copy(dist_path, dest)
    return dist_path
def main():
    """CLI entry point: build the sdist per --version/--dest options."""
    parser = argparse.ArgumentParser()
    parser.add_argument('--dest')
    parser.add_argument('--version')
    opts = parser.parse_args()
    return makedist(opts.version, dest=opts.dest)
def copy(source, dest):
    """Copy *source* to *dest*, preserving attributes, via ``cp -a``."""
    cmd = 'cp -a %s %s' % (quote(source), quote(dest))
    system(cmd)
if __name__ == '__main__':  # script entry point
    main()
#!/usr/bin/python
"""Update __version__, version_info and add __changeset__.
'dev' in version_info should be replaced with alpha|beta|candidate|final
'dev' in __version__ should be replaced with a|b|rc|<empty string>
"""
from __future__ import print_function
import sys
import os
import re
from argparse import ArgumentParser
from distutils.version import LooseVersion
# Matches the module-level ``__version__ = '...'`` assignment.
# (Was a non-raw string: ``\s`` is an invalid escape sequence and warns
# on modern Pythons -- now a raw string, same pattern.)
version_re = re.compile(r"^__version__\s*=\s*'([^']+)'", re.M)
# Matches the module-level ``version_info = ...`` assignment.
version_info_re = re.compile(r"^version_info\s*=\s*([^\n]+)", re.M)
# Strict version scheme accepted by bdist_msi: N.N[.N][aN|bN].
strict_version_re = re.compile(r'^(\d+) \. (\d+) (\. (\d+))? ([ab](\d+))?$', re.VERBOSE)
def read(command):
    """Run *command* and return its stripped stdout; exit on failure."""
    pipe = os.popen(command)
    output = pipe.read()
    status = pipe.close()  # None on success, exit status otherwise
    if status:
        sys.exit('Failed (%s) to run %r' % (status, command))
    return output.strip()
def get_changeset():
    """Return a human-readable git revision id for the working tree."""
    describe = 'git describe --tags --always --dirty --long'
    return read(describe)
def get_version_info(version):
    """Convert a version string to a ``sys.version_info``-style tuple.

    Returns ``(major, minor, micro, releaselevel, serial)`` where
    *releaselevel* is one of alpha/beta/candidate/dev/final.

    >>> get_version_info('0.13.6')
    (0, 13, 6, 'final', 0)
    >>> get_version_info('1.1')
    (1, 1, 0, 'final', 0)
    >>> get_version_info('1')
    (1, 0, 0, 'final', 0)
    >>> get_version_info('1.0dev1')
    (1, 0, 0, 'dev', 1)
    >>> get_version_info('1.0a3')
    (1, 0, 0, 'alpha', 3)
    >>> get_version_info('1.0rc1')
    (1, 0, 0, 'candidate', 1)
    """
    repl = {'a': 'alpha',
            'b': 'beta',
            'rc': 'candidate',
            'dev': 'dev'}
    # Tokenize into ints and lowercase tags, e.g. '1.0rc1' -> [1, 0, 'rc', 1].
    # This was previously done with distutils.version.LooseVersion, which
    # was removed from the standard library in Python 3.12.
    components = [int(token) if token.isdigit() else token
                  for token in re.findall(r'\d+|[a-z]+', version)]
    result = []
    for component in components:
        if isinstance(component, int):
            result.append(component)
        else:
            # Pad the numeric part to (major, minor, micro) before the tag.
            while len(result) < 3:
                result.append(0)
            component = repl[component]
            result.append(component)
    while len(result) < 3:
        result.append(0)
    if len(result) == 3:
        # No pre-release tag at all: a final release.
        result.append('final')
        result.append(0)
    return tuple(result)
def modify_version(filename, new_version):
    """Return ``(original_contents, modified_contents)`` for *filename*.

    In the modified text, the ``version_info`` assignment is replaced
    (only when *new_version* is given), ``__version__`` is updated, and a
    ``__changeset__`` line holding the current git revision is appended
    right after it.  Raises AssertionError if either marker is missing
    or appears more than once.
    """
    original_data = open(filename).read()
    # An existing __changeset__ means a previous stamp was never reverted.
    assert '__changeset__' not in original_data, 'Must revert the old update first'
    data = original_data
    if new_version:
        new_version_info = get_version_info(new_version)
        def repl_version_info(m):
            # Replace the whole "version_info = ..." assignment.
            return 'version_info = %s' % (new_version_info, )
        data, count = version_info_re.subn(repl_version_info, data)
        if not count:
            raise AssertionError('version_info not found in %s' % filename)
        if count != 1:
            raise AssertionError('version_info found more than once in %s' % filename)
    def repl_version(m):
        # Swap in the new version (if any) and append the changeset marker.
        result = m.group(0).replace(m.group(1), new_version or m.group(1))
        result += "\n__changeset__ = '%s'" % get_changeset()
        return result
    data, count = version_re.subn(repl_version, data)
    if not count:
        raise AssertionError('__version__ not found in %s' % filename)
    if count != 1:
        raise AssertionError('__version__ found more than once in %s' % filename)
    return original_data, data
def unlink(path):
    """Remove *path*, silently ignoring the case where it does not exist.

    Any other OSError (permissions, path is a directory, ...) propagates.
    """
    import errno
    try:
        os.unlink(path)
    except OSError as ex:
        # Use the symbolic constant instead of the magic number 2.
        if ex.errno == errno.ENOENT:  # No such file or directory
            return
        raise
def write(filename, data):
    """Atomically replace *filename* with *data*.

    Writes to a sibling temp file, fsyncs, then renames into place, so a
    reader never observes a partially written file.  Writing a fresh file
    also intentionally breaks hard links so that util/makedist.py can use
    "cp --link".  On any failure the temp file is removed and the error
    re-raised.
    """
    tmpname = filename + '.tmp.%s' % os.getpid()
    try:
        # ``with`` guarantees the descriptor is closed even if write/flush/
        # fsync raises (the original leaked it on that path), and closing
        # before rename is required on Windows anyway.
        with open(tmpname, 'w') as f:
            f.write(data)
            f.flush()
            os.fsync(f.fileno())
        os.rename(tmpname, filename)
    except:
        unlink(tmpname)
        raise
def main():
    """CLI: stamp version metadata into a file, or preview via --dry-run."""
    global options
    parser = ArgumentParser()
    parser.add_argument('--version', default='dev')
    parser.add_argument('--dry-run', action='store_true')
    parser.add_argument('filename')
    options = parser.parse_args()

    version = options.version
    if version.lower() == 'dev':
        # 'dev' means: leave __version__ alone, only add __changeset__.
        version = ''
    if version and strict_version_re.match(version) is None:
        sys.stderr.write('WARNING: Not a strict version: %r (bdist_msi will fail)' % version)

    original_content, new_content = modify_version(options.filename, version)
    if not options.dry_run:
        write(options.filename, new_content)
        print('Updated %s' % options.filename)
    else:
        # Write the preview to /tmp and show a unified diff instead.
        tmpname = '/tmp/' + os.path.basename(options.filename) + '.set_version'
        write(tmpname, new_content)
        if not os.system('diff -u %s %s' % (options.filename, tmpname)):
            sys.exit('No differences applied')


if __name__ == '__main__':
    main()
#!/usr/bin/python -u
"""
Unix utilities must be installed on target machine for this to work: http://unxutils.sourceforge.net/
"""
import sys
import os
import argparse
def system(cmd, exit=True):
    """Echo *cmd* to stderr and run it via the shell.

    When *exit* is true (the default) a failing command terminates the
    whole script; otherwise the failure status is returned to the caller.
    """
    sys.stderr.write('+ %s\n' % cmd)
    status = os.system(cmd)
    if status and exit:
        sys.exit('%r failed' % cmd)
    return status
# Command-line options for driving a remote Windows build/test machine.
parser = argparse.ArgumentParser()
parser.add_argument('--host')  # remote Windows host to ssh/scp to
parser.add_argument('--username', default='Administrator')
parser.add_argument('--source')  # path to a pre-built sdist tarball
parser.add_argument('--dist', action='store_true')  # build binary dists instead of testing
parser.add_argument('--python', default='27')  # remote Python: version digits or full path
parser.add_argument('args', nargs='*')  # mode + tarball when run on the remote side
options = parser.parse_args()
args = options.args
def prepare():
    """Unpack the uploaded sdist (``args[1]``) and chdir into its tree."""
    gz_name = args[1]
    tar_name = gz_name.rsplit('.', 1)[0]   # strip ".gz"
    src_dir = tar_name.rsplit('.', 1)[0]   # strip ".tar"
    # Clean leftovers from any previous run before extracting.
    system('rm -fr %s %s' % (tar_name, src_dir))
    system('gzip -d %s && tar -xf %s' % (gz_name, tar_name))
    os.chdir(src_dir)
os.environ.setdefault('VS90COMNTOOLS', 'C:\\Program Files\\Microsoft Visual Studio 10.0\\Common7\Tools\\')
# Dispatch on the positional arguments:
#  - 'test' <sdist>: (runs on the remote Windows box) build and run the tests
#  - 'dist' <sdist>: (runs on the remote Windows box) build binary dists
#  - no args:        (runs locally) scp the sdist + this script to --host,
#                    then re-invoke this script there over ssh.
if args[0:1] == ['test']:
    prepare()
    system('%s setup.py build' % sys.executable)
    os.chdir('greentest')
    os.environ['PYTHONPATH'] = '.;..;../..'
    system('%s testrunner.py --config ../known_failures.py' % sys.executable)
elif args[0:1] == ['dist']:
    prepare()
    success = 0
    # Try each bdist format; succeed if at least one of them works.
    for command in ['bdist_egg', 'bdist_wininst', 'bdist_msi']:
        cmd = sys.executable + ' setup.py ' + command
        if not system(cmd, exit=False):
            success += 1
    if not success:
        sys.exit('bdist_egg bdist_wininst and bdist_msi all failed')
elif not args:
    # Local driver mode: requires --host; build the sdist if none given.
    assert options.host
    if not options.source:
        import makedist
        options.source = makedist.makedist()
    options.source_name = os.path.basename(options.source)
    options.script_path = os.path.abspath(__file__)
    options.script_name = os.path.basename(__file__)
    if options.python.isdigit():
        # Bare digits like '27' mean C:/Python27/python.exe on the target.
        options.python = 'C:/Python' + options.python + '/python.exe'
    tar_name = options.source_name.rsplit('.', 1)[0]
    dir_name = tar_name.rsplit('.', 1)[0]
    options.dir_name = dir_name
    # Upload the sdist and this script to the remote user's home directory.
    system('scp %(source)s %(script_path)s %(username)s@%(host)s:' % options.__dict__)
    if options.dist:
        system('ssh %(username)s@%(host)s %(python)s -u %(script_name)s dist %(source_name)s' % options.__dict__)
        try:
            os.mkdir('dist')
        except OSError:
            pass  # dist/ already exists locally
        # Fetch the built distributions back from the remote box.
        system('scp -r %(username)s@%(host)s:%(dir_name)s/dist/ dist' % options.__dict__)
    else:
        system('ssh %(username)s@%(host)s C:/Python27/python.exe -u %(script_name)s test %(source_name)s' % options.__dict__)
else:
    sys.exit('Invalid args: %r' % (args, ))
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment