Gwenaël Samain / cython / Commits

Commit 0907d71f
Authored Feb 09, 2019 by jbrockmendel; committed by Stefan Behnel, Feb 09, 2019
CLN: Remove unused (#2830)
Remove unused code, mostly from Plex.
parent ebc9b746

Showing 5 changed files with 2 additions and 226 deletions (+2, -226)
Cython/Compiler/Main.py     +0  -24
Cython/Plex/Errors.py       +0  -4
Cython/Plex/Lexicons.py     +2  -17
Cython/Plex/Timing.py       +0  -28
Cython/Plex/Traditional.py  +0  -153
Cython/Compiler/Main.py (view file @ 0907d71f)
...
@@ -39,21 +39,6 @@ module_name_pattern = re.compile(r"[A-Za-z_][A-Za-z0-9_]*(\.[A-Za-z_][A-Za-z0-9_
verbose = 0

class CompilationData(object):
    # Bundles the information that is passed from transform to transform.
    # (For now, this is only)
    # While Context contains every pxd ever loaded, path information etc.,
    # this only contains the data related to a single compilation pass
    #
    # pyx         ModuleNode              Main code tree of this compilation.
    # pxds        {string : ModuleNode}   Trees for the pxds used in the pyx.
    # codewriter  CCodeWriter             Where to output final code.
    # options     CompilationOptions
    # result      CompilationResult
    pass

class Context(object):
    # This class encapsulates the context needed for compiling
    # one or more Cython implementation files along with their
...
@@ -125,15 +110,6 @@ class Context(object):
        self._interned[key] = value
        return value

    def intern_value(self, value, *key):
        key = (type(value), value) + key
        try:
            return self._interned[key]
        except KeyError:
            pass
        self._interned[key] = value
        return value

    # pipeline creation functions can now be found in Pipeline.py

    def process_pxd(self, source_desc, scope, module_name):
...
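Aside: the removed intern_value() implemented the same caching pattern as the interning code kept above it, keyed on (type(value), value) so that equal constants of the same type resolve to one shared object while values of different types stay distinct. A minimal standalone sketch of that pattern (the Interner name below is illustrative, not part of Cython):

    class Interner(object):
        """Sketch of the (type, value) interning pattern used by the removed method."""
        def __init__(self):
            self._interned = {}

        def intern_value(self, value, *key):
            # Include the type in the key so values that compare equal across
            # types (e.g. 1 and 1.0) are interned separately.
            key = (type(value), value) + key
            try:
                return self._interned[key]
            except KeyError:
                pass
            self._interned[key] = value
            return value

    interner = Interner()
    assert interner.intern_value((1, 2)) is interner.intern_value((1, 2))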
Cython/Plex/Errors.py (view file @ 0907d71f)
...
@@ -17,10 +17,6 @@ class PlexValueError(PlexError, ValueError):
    pass

class InvalidRegex(PlexError):
    pass

class InvalidToken(PlexError):
    def __init__(self, token_number, message):
        PlexError.__init__(self, "Token number %d: %s" % (token_number, message))
...
Cython/Plex/Lexicons.py (view file @ 0907d71f)
...
@@ -109,14 +109,9 @@ class Lexicon(object):
    machine = None   # Machine
    tables = None    # StateTableMachine

    def __init__(self, specifications, debug=None, debug_flags=7, timings=None):
    def __init__(self, specifications, debug=None, debug_flags=7):
        if not isinstance(specifications, list):
            raise Errors.InvalidScanner("Scanner definition is not a list")
        if timings:
            from .Timing import time
            total_time = 0.0
            time1 = time()
        nfa = Machines.Machine()
        default_initial_state = nfa.new_initial_state('')
...
@@ -138,25 +133,15 @@ class Lexicon(object):
                    token_number,
                    "Expected a token definition (tuple) or State instance")
        if timings:
            time2 = time()
            total_time = total_time + (time2 - time1)
            time3 = time()
        if debug and (debug_flags & 1):
            debug.write("\n============= NFA ===========\n")
            nfa.dump(debug)
        dfa = DFA.nfa_to_dfa(nfa, debug=(debug_flags & 3) == 3 and debug)
        if timings:
            time4 = time()
            total_time = total_time + (time4 - time3)
        if debug and (debug_flags & 2):
            debug.write("\n============= DFA ===========\n")
            dfa.dump(debug)
        if timings:
            timings.write("Constructing NFA : %5.2f\n" % (time2 - time1))
            timings.write("Converting to DFA: %5.2f\n" % (time4 - time3))
            timings.write("TOTAL            : %5.2f\n" % total_time)
        self.machine = dfa
...
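With the timings parameter gone from Lexicon.__init__(), table construction can still be timed from the caller's side using only the standard library. A sketch under that assumption (specs is a placeholder for a real Plex scanner specification list, not something defined by this commit):

    import sys
    import time

    from Cython.Plex.Lexicons import Lexicon

    specs = []  # placeholder: real (pattern, action) tuples / State instances go here
    t0 = time.time()
    lexicon = Lexicon(specs)
    sys.stderr.write("Lexicon construction took %5.2f s\n" % (time.time() - t0))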
Cython/Plex/Timing.py  deleted 100644 → 0  (view file @ ebc9b746)
"""
Get time in platform-dependent way
"""
from
__future__
import
absolute_import
import
os
from
sys
import
platform
,
exit
,
stderr
if
platform
==
'mac'
:
import
MacOS
def
time
():
return
MacOS
.
GetTicks
()
/
60.0
timekind
=
"real"
elif
hasattr
(
os
,
'times'
):
def
time
():
t
=
os
.
times
()
return
t
[
0
]
+
t
[
1
]
timekind
=
"cpu"
else
:
stderr
.
write
(
"Don't know how to get time on platform %s
\
n
"
%
repr
(
platform
))
exit
(
1
)
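For reference, the deleted helper reported CPU time as the sum of user and system time from os.times(). On current Python versions, time.process_time() in the standard library returns the same quantity, so a caller that still needed this measurement could write (a sketch, not part of this commit):

    import time

    def cpu_time():
        # Sum of user and system CPU time of the current process,
        # matching what the deleted Plex.Timing.time() reported on POSIX.
        return time.process_time()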
Cython/Plex/Traditional.py  deleted 100644 → 0  (view file @ ebc9b746)
"""
Python Lexical Analyser
Traditional Regular Expression Syntax
"""
from
__future__
import
absolute_import
from
.Regexps
import
Alt
,
Seq
,
Rep
,
Rep1
,
Opt
,
Any
,
AnyBut
,
Bol
,
Eol
,
Char
from
.Errors
import
PlexError
class
RegexpSyntaxError
(
PlexError
):
pass
def
re
(
s
):
"""
Convert traditional string representation of regular expression |s|
into Plex representation.
"""
return
REParser
(
s
).
parse_re
()
class
REParser
(
object
):
def
__init__
(
self
,
s
):
self
.
s
=
s
self
.
i
=
-
1
self
.
end
=
0
self
.
next
()
def
parse_re
(
self
):
re
=
self
.
parse_alt
()
if
not
self
.
end
:
self
.
error
(
"Unexpected %s"
%
repr
(
self
.
c
))
return
re
def
parse_alt
(
self
):
"""Parse a set of alternative regexps."""
re
=
self
.
parse_seq
()
if
self
.
c
==
'|'
:
re_list
=
[
re
]
while
self
.
c
==
'|'
:
self
.
next
()
re_list
.
append
(
self
.
parse_seq
())
re
=
Alt
(
*
re_list
)
return
re
def
parse_seq
(
self
):
"""Parse a sequence of regexps."""
re_list
=
[]
while
not
self
.
end
and
self
.
c
not
in
"|)"
:
re_list
.
append
(
self
.
parse_mod
())
return
Seq
(
*
re_list
)
def
parse_mod
(
self
):
"""Parse a primitive regexp followed by *, +, ? modifiers."""
re
=
self
.
parse_prim
()
while
not
self
.
end
and
self
.
c
in
"*+?"
:
if
self
.
c
==
'*'
:
re
=
Rep
(
re
)
elif
self
.
c
==
'+'
:
re
=
Rep1
(
re
)
else
:
# self.c == '?'
re
=
Opt
(
re
)
self
.
next
()
return
re
def
parse_prim
(
self
):
"""Parse a primitive regexp."""
c
=
self
.
get
()
if
c
==
'.'
:
re
=
AnyBut
(
"
\
n
"
)
elif
c
==
'^'
:
re
=
Bol
elif
c
==
'$'
:
re
=
Eol
elif
c
==
'('
:
re
=
self
.
parse_alt
()
self
.
expect
(
')'
)
elif
c
==
'['
:
re
=
self
.
parse_charset
()
self
.
expect
(
']'
)
else
:
if
c
==
'
\
\
'
:
c
=
self
.
get
()
re
=
Char
(
c
)
return
re
def
parse_charset
(
self
):
"""Parse a charset. Does not include the surrounding []."""
char_list
=
[]
invert
=
0
if
self
.
c
==
'^'
:
invert
=
1
self
.
next
()
if
self
.
c
==
']'
:
char_list
.
append
(
']'
)
self
.
next
()
while
not
self
.
end
and
self
.
c
!=
']'
:
c1
=
self
.
get
()
if
self
.
c
==
'-'
and
self
.
lookahead
(
1
)
!=
']'
:
self
.
next
()
c2
=
self
.
get
()
for
a
in
range
(
ord
(
c1
),
ord
(
c2
)
+
1
):
char_list
.
append
(
chr
(
a
))
else
:
char_list
.
append
(
c1
)
chars
=
''
.
join
(
char_list
)
if
invert
:
return
AnyBut
(
chars
)
else
:
return
Any
(
chars
)
def
next
(
self
):
"""Advance to the next char."""
s
=
self
.
s
i
=
self
.
i
=
self
.
i
+
1
if
i
<
len
(
s
):
self
.
c
=
s
[
i
]
else
:
self
.
c
=
''
self
.
end
=
1
def
get
(
self
):
if
self
.
end
:
self
.
error
(
"Premature end of string"
)
c
=
self
.
c
self
.
next
()
return
c
def
lookahead
(
self
,
n
):
"""Look ahead n chars."""
j
=
self
.
i
+
n
if
j
<
len
(
self
.
s
):
return
self
.
s
[
j
]
else
:
return
''
def
expect
(
self
,
c
):
"""
Expect to find character |c| at current position.
Raises an exception otherwise.
"""
if
self
.
c
==
c
:
self
.
next
()
else
:
self
.
error
(
"Missing %s"
%
repr
(
c
))
def
error
(
self
,
mess
):
"""Raise exception to signal syntax error in regexp."""
raise
RegexpSyntaxError
(
"Syntax error in regexp %s at position %d: %s"
%
(
repr
(
self
.
s
),
self
.
i
,
mess
))
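For context, the deleted module was a small recursive-descent parser that translated traditional regular-expression strings into Plex regexp objects via the re() helper. A usage sketch of the removed API (the equivalent expression shown in the comment is illustrative):

    # No longer available after this commit:
    from Cython.Plex.Traditional import re as plex_re

    pattern = plex_re("a(b|c)*")
    # Roughly equivalent to building the Plex expression by hand:
    #   Seq(Char('a'), Rep(Alt(Seq(Char('b')), Seq(Char('c')))))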