Zope, commit 05e980af, authored Feb 13, 2001
Merged fix for bug #1896 from 2.3 branch
parent 4262f14a

Changes: 2 changed files, with 1398 additions and 1396 deletions

    ZServer/medusa/http_server.py              +699  -698
    lib/python/ZServer/medusa/http_server.py   +699  -698
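The substance of the fix, judging from the diff below, is in http_request.log_date_string: the old code built the whole timestamp with time.strftime('%d/%b/%Y:%H:%M:%S '), whose %b month abbreviation follows the process locale, while the new code takes the month abbreviation from the http_date.monthname table, presumably so the common-log-format date stays in English regardless of locale. A minimal standalone sketch of the new behaviour (the monthname table and the '+0000' timezone suffix are stand-ins here, not the module's own values):

import time

# Stand-in for medusa's http_date.monthname table (index 0 unused).
monthname = [None, 'Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun',
             'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec']

def log_date_string(when, tz_for_log='+0000'):
    # Only the day, year and time go through strftime; the month
    # abbreviation comes from the fixed table, so it does not vary
    # with the locale.
    logtime = time.localtime(when)
    return (time.strftime('%d/', logtime) +
            monthname[logtime[1]] +
            time.strftime('/%Y:%H:%M:%S ', logtime) +
            tz_for_log)

print(log_date_string(time.time()))   # e.g. 13/Feb/2001:21:18:54 +0000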
ZServer/medusa/http_server.py (view file @ 05e980af)
#! /usr/local/bin/python
# -*- Mode: Python; tab-width: 4 -*-
#
#   Author: Sam Rushing <rushing@nightmare.com>
#   Copyright 1996-2000 by Sam Rushing
#   All Rights Reserved.
#

-RCS_ID = '$Id: http_server.py,v 1.19 2000/08/23 20:39:26 brian Exp $'
+RCS_ID = '$Id: http_server.py,v 1.20 2001/02/13 21:18:54 brian Exp $'

# python modules
import os

...
@@ -33,648 +33,649 @@ VERSION_STRING = string.split(RCS_ID)[2]
from counter import counter

# ===========================================================================
#                                           Request Object
# ===========================================================================

class http_request:

    # default reply code
    reply_code = 200

    request_counter = counter()

    # Whether to automatically use chunked encoding when
    #
    #   HTTP version is 1.1
    #   Content-Length is not set
    #   Chunked encoding is not already in effect
    #
    # If your clients are having trouble, you might want to disable this.
    use_chunked = 1

    # by default, this request object ignores user data.
    collector = None

    def __init__ (self, *args):
        # unpack information about the request
        (self.channel, self.request,
         self.command, self.uri, self.version,
         self.header) = args

        self.outgoing = fifo()
        self.reply_headers = {
                'Server'    : 'Medusa/%s' % VERSION_STRING,
                'Date'      : http_date.build_http_date (time.time())
                }
        self.request_number = http_request.request_counter.increment()
        self._split_uri = None
        self._header_cache = {}

    # --------------------------------------------------
    # reply header management
    # --------------------------------------------------
    def __setitem__ (self, key, value):
        self.reply_headers[key] = value

    def __getitem__ (self, key):
        return self.reply_headers[key]

    def has_key (self, key):
        return self.reply_headers.has_key (key)

    def build_reply_header (self):
        return string.join (
                [self.response(self.reply_code)] + map (
                        lambda x: '%s: %s' % x,
                        self.reply_headers.items()
                        ),
                '\r\n'
                ) + '\r\n\r\n'

    # --------------------------------------------------
    # split a uri
    # --------------------------------------------------

    # <path>;<params>?<query>#<fragment>
    path_regex = regex.compile (
    #       path      params    query   fragment
            '\\([^;?#]*\\)\\(;[^?#]*\\)?\\(\\?[^#]*\)?\(#.*\)?'
            )
    def split_uri (self):
        if self._split_uri is None:
            if self.path_regex.match (self.uri) != len(self.uri):
                raise ValueError, "Broken URI"
            else:
                self._split_uri = map (lambda i,r=self.path_regex: r.group(i), range(1,5))
        return self._split_uri

    def get_header_with_regex (self, head_reg, group):
        for line in self.header:
            if head_reg.match (line) == len(line):
                return head_reg.group(group)
        return ''

    def get_header (self, header):
        header = string.lower (header)
        hc = self._header_cache
        if not hc.has_key (header):
            h = header + ': '
            hl = len(h)
            for line in self.header:
                if string.lower (line[:hl]) == h:
                    r = line[hl:]
                    hc[header] = r
                    return r
            hc[header] = None
            return None
        else:
            return hc[header]

    # --------------------------------------------------
    # user data
    # --------------------------------------------------

    def collect_incoming_data (self, data):
        if self.collector:
            self.collector.collect_incoming_data (data)
        else:
            self.log_info(
                    'Dropping %d bytes of incoming request data' % len(data),
                    'warning'
                    )

    def found_terminator (self):
        if self.collector:
            self.collector.found_terminator()
        else:
            self.log_info (
                    'Unexpected end-of-record for incoming request',
                    'warning'
                    )

    def push (self, thing):
        if type(thing) == type(''):
            self.outgoing.push (producers.simple_producer (thing))
        else:
            self.outgoing.push (thing)
    def response (self, code=200):
        message = self.responses[code]
        self.reply_code = code
        return 'HTTP/%s %d %s' % (self.version, code, message)

    def error (self, code):
        self.reply_code = code
        message = self.responses[code]
        s = self.DEFAULT_ERROR_MESSAGE % {
                'code': code,
                'message': message,
                }
        self['Content-Length'] = len(s)
        self['Content-Type'] = 'text/html'
        # make an error reply
        self.push (s)
        self.done()

    # can also be used for empty replies
    reply_now = error

    def done (self):
        "finalize this transaction - send output to the http channel"
        # ----------------------------------------
        # persistent connection management
        # ----------------------------------------

        #  --- BUCKLE UP! ----

        connection = string.lower (get_header (CONNECTION, self.header))

        close_it = 0
        wrap_in_chunking = 0

        if self.version == '1.0':
            if connection == 'keep-alive':
                if not self.has_key ('Content-Length'):
                    close_it = 1
                else:
                    self['Connection'] = 'Keep-Alive'
            else:
                close_it = 1
        elif self.version == '1.1':
            if connection == 'close':
                close_it = 1
            elif not self.has_key ('Content-Length'):
                if self.has_key ('Transfer-Encoding'):
                    if not self['Transfer-Encoding'] == 'chunked':
                        close_it = 1
                elif self.use_chunked:
                    self['Transfer-Encoding'] = 'chunked'
                    wrap_in_chunking = 1
                else:
                    close_it = 1
        elif self.version is None:
            # Although we don't *really* support http/0.9 (because we'd have to
            # use \r\n as a terminator, and it would just yuck up a lot of stuff)
            # it's very common for developers to not want to type a version number
            # when using telnet to debug a server.
            close_it = 1
        outgoing_header = producers.simple_producer (self.build_reply_header())

        if close_it:
            self['Connection'] = 'close'

        if wrap_in_chunking:
            outgoing_producer = producers.chunked_producer (
                    producers.composite_producer (self.outgoing)
                    )
            # prepend the header
            outgoing_producer = producers.composite_producer (
                    fifo([outgoing_header, outgoing_producer])
                    )
        else:
            # prepend the header
            self.outgoing.push_front (outgoing_header)
            outgoing_producer = producers.composite_producer (self.outgoing)

        # apply a few final transformations to the output
        self.channel.push_with_producer (
                # globbing gives us large packets
                producers.globbing_producer (
                        # hooking lets us log the number of bytes sent
                        producers.hooked_producer (
                                outgoing_producer,
                                self.log
                                )
                        )
                )

        self.channel.current_request = None

        if close_it:
            self.channel.close_when_done()
    def log_date_string (self, when):
-        return time.strftime (
-                '%d/%b/%Y:%H:%M:%S ',
-                time.localtime (when)
-                ) + tz_for_log
+        logtime = time.localtime (when)
+        return time.strftime ('%d/', logtime) + \
+               http_date.monthname[logtime[1]] + \
+               time.strftime ('/%Y:%H:%M:%S ', logtime) + \
+               tz_for_log
    def log (self, bytes):
        user_agent = self.get_header ('user-agent')
        if not user_agent: user_agent = ''
        referer = self.get_header ('referer')
        if not referer: referer = ''
        self.channel.server.logger.log (
                self.channel.addr[0],
                # self.channel.addr[1],
                ' - - [%s] "%s" %d %d "%s" "%s"\n' % (
                        self.log_date_string (time.time()),
                        self.request,
                        self.reply_code,
                        bytes,
                        referer,
                        user_agent
                        )
                )
    responses = {
            100: "Continue",
            101: "Switching Protocols",
            200: "OK",
            201: "Created",
            202: "Accepted",
            203: "Non-Authoritative Information",
            204: "No Content",
            205: "Reset Content",
            206: "Partial Content",
            300: "Multiple Choices",
            301: "Moved Permanently",
            302: "Moved Temporarily",
            303: "See Other",
            304: "Not Modified",
            305: "Use Proxy",
            400: "Bad Request",
            401: "Unauthorized",
            402: "Payment Required",
            403: "Forbidden",
            404: "Not Found",
            405: "Method Not Allowed",
            406: "Not Acceptable",
            407: "Proxy Authentication Required",
            408: "Request Time-out",
            409: "Conflict",
            410: "Gone",
            411: "Length Required",
            412: "Precondition Failed",
            413: "Request Entity Too Large",
            414: "Request-URI Too Large",
            415: "Unsupported Media Type",
            500: "Internal Server Error",
            501: "Not Implemented",
            502: "Bad Gateway",
            503: "Service Unavailable",
            504: "Gateway Time-out",
            505: "HTTP Version not supported"
            }
    # Default error message
    DEFAULT_ERROR_MESSAGE = string.join (
            ['<head>',
             '<title>Error response</title>',
             '</head>',
             '<body>',
             '<h1>Error response</h1>',
             '<p>Error code %(code)d.',
             '<p>Message: %(message)s.',
             '</body>',
             ''
             ],
            '\r\n'
            )
# ===========================================================================
#                                           HTTP Channel Object
# ===========================================================================

class http_channel (asynchat.async_chat):

    # use a larger default output buffer
    ac_out_buffer_size = 1<<16

    current_request = None
    channel_counter = counter()

    def __init__ (self, server, conn, addr):
        self.channel_number = http_channel.channel_counter.increment()
        self.request_counter = counter()
        asynchat.async_chat.__init__ (self, conn)
        self.server = server
        self.addr = addr
        self.set_terminator ('\r\n\r\n')
        self.in_buffer = ''
        self.creation_time = int (time.time())
        self.check_maintenance()

    def __repr__ (self):
        ar = asynchat.async_chat.__repr__(self)[1:-1]
        return '<%s channel#: %s requests:%s>' % (
                ar,
                self.channel_number,
                self.request_counter
                )
    # Channel Counter, Maintenance Interval...
    maintenance_interval = 500

    def check_maintenance (self):
        if not self.channel_number % self.maintenance_interval:
            self.maintenance()

    def maintenance (self):
        self.kill_zombies()

    # 30-minute zombie timeout.  status_handler also knows how to kill zombies.
    zombie_timeout = 30 * 60

    def kill_zombies (self):
        now = int (time.time())
        for channel in asyncore.socket_map.values():
            if channel.__class__ == self.__class__:
                if (now - channel.creation_time) > channel.zombie_timeout:
                    channel.close()
    # --------------------------------------------------
    # send/recv overrides, good place for instrumentation.
    # --------------------------------------------------

    # this information needs to get into the request object,
    # so that it may log correctly.
    def send (self, data):
        result = asynchat.async_chat.send (self, data)
        self.server.bytes_out.increment (len(data))
        return result

    def recv (self, buffer_size):
        try:
            result = asynchat.async_chat.recv (self, buffer_size)
            self.server.bytes_in.increment (len(result))
            return result
        except MemoryError:
            # --- Save a Trip to Your Service Provider ---
            # It's possible for a process to eat up all the memory of
            # the machine, and put it in an extremely wedged state,
            # where medusa keeps running and can't be shut down.  This
            # is where MemoryError tends to get thrown, though of
            # course it could get thrown elsewhere.
            sys.exit ("Out of Memory!")

    def handle_error (self):
        t, v = sys.exc_info()[:2]
        if t is SystemExit:
            raise t, v
        else:
            asynchat.async_chat.handle_error (self)

    def log (self, *args):
        pass
    # --------------------------------------------------
    # async_chat methods
    # --------------------------------------------------

    def collect_incoming_data (self, data):
        if self.current_request:
            # we are receiving data (probably POST data) for a request
            self.current_request.collect_incoming_data (data)
        else:
            # we are receiving header (request) data
            self.in_buffer = self.in_buffer + data

    def found_terminator (self):
        if self.current_request:
            self.current_request.found_terminator()
        else:
            header = self.in_buffer
            self.in_buffer = ''

            lines = string.split (header, '\r\n')

            # --------------------------------------------------
            # crack the request header
            # --------------------------------------------------

            while lines and not lines[0]:
                # as per the suggestion of http-1.1 section 4.1, (and
                # Eric Parker <eparker@zyvex.com>), ignore a leading
                # blank lines (buggy browsers tack it onto the end of
                # POST requests)
                lines = lines[1:]

            if not lines:
                self.close_when_done()
                return

            request = lines[0]
            command, uri, version = crack_request (request)
            header = join_headers (lines[1:])

            r = http_request (self, request, command, uri, version, header)
            self.request_counter.increment()
            self.server.total_requests.increment()

            if command is None:
                self.log_info ('Bad HTTP request: %s' % request, 'error')
                r.error (400)
                return

            # --------------------------------------------------
            # handler selection and dispatch
            # --------------------------------------------------
            for h in self.server.handlers:
                if h.match (r):
                    try:
                        self.current_request = r
                        # This isn't used anywhere.
                        # r.handler = h # CYCLE
                        h.handle_request (r)
                    except:
                        self.server.exceptions.increment()
                        (file, fun, line), t, v, tbinfo = asyncore.compact_traceback()
                        self.log_info (
                                'Server Error: %s, %s: file: %s line: %s' % (t,v,file,line),
                                'error')
                        try:
                            r.error (500)
                        except:
                            pass
                    return

            # no handlers, so complain
            r.error (404)
    def writable (self):
        # this is just the normal async_chat 'writable', here for comparison
        return self.ac_out_buffer or len(self.producer_fifo)

    def writable_for_proxy (self):
        # this version of writable supports the idea of a 'stalled' producer
        # [i.e., it's not ready to produce any output yet] This is needed by
        # the proxy, which will be waiting for the magic combination of
        # 1) hostname resolved
        # 2) connection made
        # 3) data available.
        if self.ac_out_buffer:
            return 1
        elif len(self.producer_fifo):
            p = self.producer_fifo.first()
            if hasattr (p, 'stalled'):
                return not p.stalled()
            else:
                return 1
# ===========================================================================
#                                           HTTP Server Object
# ===========================================================================

class http_server (asyncore.dispatcher):

    SERVER_IDENT = 'HTTP Server (V%s)' % VERSION_STRING

    channel_class = http_channel

    def __init__ (self, ip, port, resolver=None, logger_object=None):
        self.ip = ip
        self.port = port
        asyncore.dispatcher.__init__ (self)
        self.create_socket (socket.AF_INET, socket.SOCK_STREAM)

        self.handlers = []

        if not logger_object:
            logger_object = logger.file_logger (sys.stdout)

        self.set_reuse_addr()
        self.bind ((ip, port))

        # lower this to 5 if your OS complains
        self.listen (1024)

        host, port = self.socket.getsockname()
        if not ip:
            self.log_info ('Computing default hostname', 'warning')
            ip = socket.gethostbyname (socket.gethostname())
        try:
            self.server_name = socket.gethostbyaddr (ip)[0]
        except socket.error:
            self.log_info ('Cannot do reverse lookup', 'warning')
            self.server_name = ip       # use the IP address as the "hostname"

        self.server_port = port
        self.total_clients = counter()
        self.total_requests = counter()
        self.exceptions = counter()
        self.bytes_out = counter()
        self.bytes_in  = counter()

        if not logger_object:
            logger_object = logger.file_logger (sys.stdout)

        if resolver:
            self.logger = logger.resolving_logger (resolver, logger_object)
        else:
            self.logger = logger.unresolving_logger (logger_object)

        self.log_info (
                'Medusa (V%s) started at %s'
                '\n\tHostname: %s'
                '\n\tPort:%d'
                '\n' % (
                        VERSION_STRING,
                        time.ctime(time.time()),
                        self.server_name,
                        port,
                        )
                )
    def writable (self):
        return 0

    def handle_read (self):
        pass

    def readable (self):
        return self.accepting

    def handle_connect (self):
        pass

    def handle_accept (self):
        self.total_clients.increment()
        try:
            conn, addr = self.accept()
        except socket.error:
            # linux: on rare occasions we get a bogus socket back from
            # accept.  socketmodule.c:makesockaddr complains that the
            # address family is unknown.  We don't want the whole server
            # to shut down because of this.
            self.log_info ('warning: server accept() threw an exception', 'warning')
            return
        except TypeError:
            # unpack non-sequence.  this can happen when a read event
            # fires on a listening socket, but when we call accept()
            # we get EWOULDBLOCK, so dispatcher.accept() returns None.
            # Seen on FreeBSD3.
            self.log_info ('warning: server accept() threw EWOULDBLOCK', 'warning')
            return

        self.channel_class (self, conn, addr)
    def install_handler (self, handler, back=0):
        if back:
            self.handlers.append (handler)
        else:
            self.handlers.insert (0, handler)

    def remove_handler (self, handler):
        self.handlers.remove (handler)
    def status (self):
        def nice_bytes (n):
            return string.join (status_handler.english_bytes (n))

        handler_stats = filter (None, map (maybe_status, self.handlers))

        if self.total_clients:
            ratio = self.total_requests.as_long() / float(self.total_clients.as_long())
        else:
            ratio = 0.0

        return producers.composite_producer (
                fifo ([producers.lines_producer (
                        ['<h2>%s</h2>'                          % self.SERVER_IDENT,
                        '<br>Listening on: <b>Host:</b> %s'     % self.server_name,
                        '<b>Port:</b> %d'                       % self.port,
                        '<p><ul>'
                        '<li>Total <b>Clients:</b> %s'          % self.total_clients,
                        '<b>Requests:</b> %s'                   % self.total_requests,
                        '<b>Requests/Client:</b> %.1f'          % (ratio),
                        '<li>Total <b>Bytes In:</b> %s' % (nice_bytes (self.bytes_in.as_long())),
                        '<b>Bytes Out:</b> %s'          % (nice_bytes (self.bytes_out.as_long())),
                        '<li>Total <b>Exceptions:</b> %s'       % self.exceptions,
                        '</ul><p>'
                        '<b>Extension List</b><ul>',
                        ])] + handler_stats + [producers.simple_producer('</ul>')]
                        )
                )
def maybe_status (thing):
    if hasattr (thing, 'status'):
        return thing.status()
    else:
        return None
CONNECTION = regex.compile ('Connection: \(.*\)', regex.casefold)
# merge multi-line headers
# [486dx2: ~500/sec]
def join_headers (headers):
    r = []
    for i in range(len(headers)):
        if headers[i][0] in ' \t':
            r[-1] = r[-1] + headers[i][1:]
        else:
            r.append (headers[i])
    return r
def get_header (head_reg, lines, group=1):
    for line in lines:
        if head_reg.match (line) == len(line):
            return head_reg.group (group)
    return ''
REQUEST = re.compile ('([^ ]+) (?:[^ :?#]+://[^ ?#/]*)?([^ ]+)(( HTTP/([0-9.]+))$|$)')

...
@@ -683,91 +684,91 @@ def crack_request (r):
    if m is not None:
        return string.lower (m.group (1)), m.group(2), m.group(5)
    else:
        return None, None, None
class fifo:
    def __init__ (self, list=None):
        if not list:
            self.list = []
        else:
            self.list = list

    def __len__ (self):
        return len(self.list)

    def first (self):
        return self.list[0]

    def push_front (self, object):
        self.list.insert (0, object)

    def push (self, data):
        self.list.append (data)

    def pop (self):
        if self.list:
            result = self.list[0]
            del self.list[0]
            return (1, result)
        else:
            return (0, None)
def compute_timezone_for_log ():
    if time.daylight:
        tz = time.altzone
    else:
        tz = time.timezone
    if tz > 0:
        neg = 1
    else:
        neg = 0
        tz = -tz
    h, rem = divmod (tz, 3600)
    m, rem = divmod (rem, 60)
    if neg:
        return '-%02d%02d' % (h, m)
    else:
        return '+%02d%02d' % (h, m)

# if you run this program over a TZ change boundary, this will be invalid.
tz_for_log = compute_timezone_for_log()
if __name__ == '__main__':
    import sys
    if len(sys.argv) < 2:
        print 'usage: %s <root> <port>' % (sys.argv[0])
    else:
        import monitor
        import filesys
        import default_handler
        import status_handler
        import ftp_server
        import chat_server
        import resolver
        import logger
        rs = resolver.caching_resolver ('127.0.0.1')
        lg = logger.file_logger (sys.stdout)
        ms = monitor.secure_monitor_server ('fnord', '127.0.0.1', 9999)
        fs = filesys.os_filesystem (sys.argv[1])
        dh = default_handler.default_handler (fs)
        hs = http_server ('', string.atoi (sys.argv[2]), rs, lg)
        hs.install_handler (dh)
        ftp = ftp_server.ftp_server (
                ftp_server.dummy_authorizer(sys.argv[1]),
                port=8021,
                resolver=rs,
                logger_object=lg
                )
        cs = chat_server.chat_server ('', 7777)
        sh = status_handler.status_extension([hs,ms,ftp,cs,rs])
        hs.install_handler (sh)
        if ('-p' in sys.argv):
            def profile_loop ():
                try:
                    asyncore.loop()
                except KeyboardInterrupt:
                    pass
            import profile
            profile.run ('profile_loop()', 'profile.out')
        else:
            asyncore.loop()
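The self-test block above takes a document root and an HTTP port on the command line (see its usage string); a hypothetical invocation, with an example path and port only, looks like:

    python http_server.py /var/www 8080

Besides the HTTP server, that test setup also starts the FTP (port 8021), monitor (9999) and chat (7777) servers shown above.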
lib/python/ZServer/medusa/http_server.py (view file @ 05e980af): the diff to this copy is the same as the ZServer/medusa/http_server.py diff shown above.
#! /usr/local/bin/python
#! /usr/local/bin/python
# -*- Mode: Python; tab-width: 4 -*-
# -*- Mode: Python; tab-width: 4 -*-
#
#
#
Author: Sam Rushing <rushing@nightmare.com>
#
Author: Sam Rushing <rushing@nightmare.com>
#
Copyright 1996-2000 by Sam Rushing
#
Copyright 1996-2000 by Sam Rushing
#
All Rights Reserved.
#
All Rights Reserved.
#
#
RCS_ID
=
'$Id: http_server.py,v 1.
19 2000/08/23 20:39:26
brian Exp $'
RCS_ID
=
'$Id: http_server.py,v 1.
20 2001/02/13 21:18:54
brian Exp $'
# python modules
# python modules
import
os
import
os
...
@@ -33,648 +33,649 @@ VERSION_STRING = string.split(RCS_ID)[2]
...
@@ -33,648 +33,649 @@ VERSION_STRING = string.split(RCS_ID)[2]
from
counter
import
counter
from
counter
import
counter
# ===========================================================================
# ===========================================================================
#
Request Object
#
Request Object
# ===========================================================================
# ===========================================================================
class
http_request
:
class
http_request
:
# default reply code
# default reply code
reply_code
=
200
reply_code
=
200
request_counter
=
counter
()
request_counter
=
counter
()
# Whether to automatically use chunked encoding when
# Whether to automatically use chunked encoding when
#
#
# HTTP version is 1.1
# HTTP version is 1.1
# Content-Length is not set
# Content-Length is not set
# Chunked encoding is not already in effect
# Chunked encoding is not already in effect
#
#
# If your clients are having trouble, you might want to disable this.
# If your clients are having trouble, you might want to disable this.
use_chunked
=
1
use_chunked
=
1
# by default, this request object ignores user data.
# by default, this request object ignores user data.
collector
=
None
collector
=
None
def
__init__
(
self
,
*
args
):
def
__init__
(
self
,
*
args
):
# unpack information about the request
# unpack information about the request
(
self
.
channel
,
self
.
request
,
(
self
.
channel
,
self
.
request
,
self
.
command
,
self
.
uri
,
self
.
version
,
self
.
command
,
self
.
uri
,
self
.
version
,
self
.
header
)
=
args
self
.
header
)
=
args
self
.
outgoing
=
fifo
()
self
.
outgoing
=
fifo
()
self
.
reply_headers
=
{
self
.
reply_headers
=
{
'Server'
:
'Medusa/%s'
%
VERSION_STRING
,
'Server'
:
'Medusa/%s'
%
VERSION_STRING
,
'Date'
:
http_date
.
build_http_date
(
time
.
time
())
'Date'
:
http_date
.
build_http_date
(
time
.
time
())
}
}
self
.
request_number
=
http_request
.
request_counter
.
increment
()
self
.
request_number
=
http_request
.
request_counter
.
increment
()
self
.
_split_uri
=
None
self
.
_split_uri
=
None
self
.
_header_cache
=
{}
self
.
_header_cache
=
{}
# --------------------------------------------------
# --------------------------------------------------
# reply header management
# reply header management
# --------------------------------------------------
# --------------------------------------------------
def
__setitem__
(
self
,
key
,
value
):
def
__setitem__
(
self
,
key
,
value
):
self
.
reply_headers
[
key
]
=
value
self
.
reply_headers
[
key
]
=
value
def
__getitem__
(
self
,
key
):
def
__getitem__
(
self
,
key
):
return
self
.
reply_headers
[
key
]
return
self
.
reply_headers
[
key
]
def
has_key
(
self
,
key
):
def
has_key
(
self
,
key
):
return
self
.
reply_headers
.
has_key
(
key
)
return
self
.
reply_headers
.
has_key
(
key
)
def
build_reply_header
(
self
):
def
build_reply_header
(
self
):
return
string
.
join
(
return
string
.
join
(
[
self
.
response
(
self
.
reply_code
)]
+
map
(
[
self
.
response
(
self
.
reply_code
)]
+
map
(
lambda
x
:
'%s: %s'
%
x
,
lambda
x
:
'%s: %s'
%
x
,
self
.
reply_headers
.
items
()
self
.
reply_headers
.
items
()
),
),
'
\
r
\
n
'
'
\
r
\
n
'
)
+
'
\
r
\
n
\
r
\
n
'
)
+
'
\
r
\
n
\
r
\
n
'
# --------------------------------------------------
# --------------------------------------------------
# split a uri
# split a uri
# --------------------------------------------------
# --------------------------------------------------
# <path>;<params>?<query>#<fragment>
# <path>;<params>?<query>#<fragment>
path_regex
=
regex
.
compile
(
path_regex
=
regex
.
compile
(
# path params query fragment
# path params query fragment
'
\
\
([^;?#]*
\
\
)
\
\
(;[^?#]*
\
\
)?
\
\
(
\
\
?[^#]*
\
)?
\
(#.*
\
)?
'
'
\
\
([^;?#]*
\
\
)
\
\
(;[^?#]*
\
\
)?
\
\
(
\
\
?[^#]*
\
)?
\
(#.*
\
)?
'
)
)
def split_uri (self):
def split_uri (self):
if self._split_uri is None:
if self._split_uri is None:
if self.path_regex.match (self.uri) != len(self.uri):
if self.path_regex.match (self.uri) != len(self.uri):
raise ValueError, "Broken URI"
raise ValueError, "Broken URI"
else:
else:
self._split_uri = map (lambda i,r=self.path_regex: r.group(i), range(1,5))
self._split_uri = map (lambda i,r=self.path_regex: r.group(i), range(1,5))
return self._split_uri
return self._split_uri
def get_header_with_regex (self, head_reg, group):
def get_header_with_regex (self, head_reg, group):
for line in self.header:
for line in self.header:
if head_reg.match (line) == len(line):
if head_reg.match (line) == len(line):
return head_reg.group(group)
return head_reg.group(group)
return ''
return ''
def get_header (self, header):
def get_header (self, header):
header = string.lower (header)
header = string.lower (header)
hc = self._header_cache
hc = self._header_cache
if not hc.has_key (header):
if not hc.has_key (header):
h = header + '
:
'
h = header + '
:
'
hl = len(h)
hl = len(h)
for line in self.header:
for line in self.header:
if string.lower (line[:hl]) == h:
if string.lower (line[:hl]) == h:
r = line[hl:]
r = line[hl:]
hc[header] = r
hc[header] = r
return r
return r
hc[header] = None
hc[header] = None
return None
return None
else:
else:
return hc[header]
return hc[header]
# --------------------------------------------------
# --------------------------------------------------
# user data
# user data
# --------------------------------------------------
# --------------------------------------------------
def collect_incoming_data (self, data):
def collect_incoming_data (self, data):
if self.collector:
if self.collector:
self.collector.collect_incoming_data (data)
self.collector.collect_incoming_data (data)
else:
else:
self.log_info(
self.log_info(
'
Dropping
%
d
bytes
of
incoming
request
data
' % len(data),
'
Dropping
%
d
bytes
of
incoming
request
data
' % len(data),
'
warning
'
'
warning
'
)
)
def found_terminator (self):
def found_terminator (self):
if self.collector:
if self.collector:
self.collector.found_terminator()
self.collector.found_terminator()
else:
else:
self.log_info (
self.log_info (
'
Unexpected
end
-
of
-
record
for
incoming
request
',
'
Unexpected
end
-
of
-
record
for
incoming
request
',
'
warning
'
'
warning
'
)
)
def push (self, thing):
def push (self, thing):
if type(thing) == type(''):
if type(thing) == type(''):
self.outgoing.push (producers.simple_producer (thing))
self.outgoing.push (producers.simple_producer (thing))
else:
else:
self.outgoing.push (thing)
self.outgoing.push (thing)
def response (self, code=200):
def response (self, code=200):
message = self.responses[code]
message = self.responses[code]
self.reply_code = code
self.reply_code = code
return '
HTTP
/%
s
%
d
%
s
' % (self.version, code, message)
return '
HTTP
/%
s
%
d
%
s
' % (self.version, code, message)
def error (self, code):
def error (self, code):
self.reply_code = code
self.reply_code = code
message = self.responses[code]
message = self.responses[code]
s = self.DEFAULT_ERROR_MESSAGE % {
s = self.DEFAULT_ERROR_MESSAGE % {
'
code
': code,
'
code
': code,
'
message
': message,
'
message
': message,
}
}
self['
Content
-
Length
'] = len(s)
self['
Content
-
Length
'] = len(s)
self['
Content
-
Type
'] = '
text
/
html
'
self['
Content
-
Type
'] = '
text
/
html
'
# make an error reply
# make an error reply
self.push (s)
self.push (s)
self.done()
self.done()
# can also be used for empty replies
# can also be used for empty replies
reply_now = error
reply_now = error
def done (self):
def done (self):
"finalize this transaction - send output to the http channel"
"finalize this transaction - send output to the http channel"
# ----------------------------------------
# ----------------------------------------
# persistent connection management
# persistent connection management
# ----------------------------------------
# ----------------------------------------
# --- BUCKLE UP! ----
# --- BUCKLE UP! ----
connection = string.lower (get_header (CONNECTION, self.header))
connection = string.lower (get_header (CONNECTION, self.header))
close_it = 0
close_it = 0
wrap_in_chunking = 0
wrap_in_chunking = 0
if self.version == '
1.0
':
if self.version == '
1.0
':
if connection == '
keep
-
alive
':
if connection == '
keep
-
alive
':
if not self.has_key ('
Content
-
Length
'):
if not self.has_key ('
Content
-
Length
'):
close_it = 1
close_it = 1
else:
else:
self['
Connection
'] = '
Keep
-
Alive
'
self['
Connection
'] = '
Keep
-
Alive
'
else:
else:
close_it = 1
close_it = 1
elif self.version == '
1.1
':
elif self.version == '
1.1
':
if connection == '
close
':
if connection == '
close
':
close_it = 1
close_it = 1
elif not self.has_key ('
Content
-
Length
'):
elif not self.has_key ('
Content
-
Length
'):
if self.has_key ('
Transfer
-
Encoding
'):
if self.has_key ('
Transfer
-
Encoding
'):
if not self['
Transfer
-
Encoding
'] == '
chunked
':
if not self['
Transfer
-
Encoding
'] == '
chunked
':
close_it = 1
close_it = 1
elif self.use_chunked:
elif self.use_chunked:
self['
Transfer
-
Encoding
'] = '
chunked
'
self['
Transfer
-
Encoding
'] = '
chunked
'
wrap_in_chunking = 1
wrap_in_chunking = 1
else:
else:
close_it = 1
close_it = 1
elif self.version is None:
elif self.version is None:
# Although we don'
t
*
really
*
support
http
/
0.9
(
because
we
'd have to
# Although we don'
t
*
really
*
support
http
/
0.9
(
because
we
'd have to
# use
\
r
\
n
as a terminator, and it would just yuck up a lot of stuff)
# use
\
r
\
n
as a terminator, and it would just yuck up a lot of stuff)
# it'
s
very
common
for
developers
to
not
want
to
type
a
version
number
# it'
s
very
common
for
developers
to
not
want
to
type
a
version
number
# when using telnet to debug a server.
# when using telnet to debug a server.
close_it
=
1
close_it
=
1
outgoing_header
=
producers
.
simple_producer
(
self
.
build_reply_header
())
outgoing_header
=
producers
.
simple_producer
(
self
.
build_reply_header
())
if
close_it
:
if
close_it
:
self
[
'Connection'
]
=
'close'
self
[
'Connection'
]
=
'close'
if
wrap_in_chunking
:
if
wrap_in_chunking
:
outgoing_producer
=
producers
.
chunked_producer
(
outgoing_producer
=
producers
.
chunked_producer
(
producers
.
composite_producer
(
self
.
outgoing
)
producers
.
composite_producer
(
self
.
outgoing
)
)
)
# prepend the header
# prepend the header
outgoing_producer
=
producers
.
composite_producer
(
outgoing_producer
=
producers
.
composite_producer
(
fifo
([
outgoing_header
,
outgoing_producer
])
fifo
([
outgoing_header
,
outgoing_producer
])
)
)
else
:
else
:
# prepend the header
# prepend the header
self
.
outgoing
.
push_front
(
outgoing_header
)
self
.
outgoing
.
push_front
(
outgoing_header
)
outgoing_producer
=
producers
.
composite_producer
(
self
.
outgoing
)
outgoing_producer
=
producers
.
composite_producer
(
self
.
outgoing
)
# apply a few final transformations to the output
# apply a few final transformations to the output
self
.
channel
.
push_with_producer
(
self
.
channel
.
push_with_producer
(
# globbing gives us large packets
# globbing gives us large packets
producers
.
globbing_producer
(
producers
.
globbing_producer
(
# hooking lets us log the number of bytes sent
# hooking lets us log the number of bytes sent
producers
.
hooked_producer
(
producers
.
hooked_producer
(
outgoing_producer
,
outgoing_producer
,
self
.
log
self
.
log
)
)
)
)
)
)
self
.
channel
.
current_request
=
None
self
.
channel
.
current_request
=
None
if
close_it
:
if
close_it
:
self
.
channel
.
close_when_done
()
self
.
channel
.
close_when_done
()
def
log_date_string
(
self
,
when
):
def
log_date_string
(
self
,
when
):
return
time
.
strftime
(
logtime
=
time
.
localtime
(
when
)
'%d/%b/%Y:%H:%M:%S '
,
return
time
.
strftime
(
'%d/'
,
logtime
)
+
\
time
.
localtime
(
when
)
http_date
.
monthname
[
logtime
[
1
]]
+
\
)
+
tz_for_log
time
.
strftime
(
'/%Y:%H:%M:%S '
,
logtime
)
+
\
tz_for_log
def
log
(
self
,
bytes
):
user_agent
=
self
.
get_header
(
'user-agent'
)
def
log
(
self
,
bytes
):
if
not
user_agent
:
user_agent
=
''
user_agent
=
self
.
get_header
(
'user-agent'
)
referer
=
self
.
get_header
(
'referer'
)
if
not
user_agent
:
user_agent
=
''
if
not
referer
:
referer
=
''
referer
=
self
.
get_header
(
'referer'
)
self
.
channel
.
server
.
logger
.
log
(
if
not
referer
:
referer
=
''
self
.
channel
.
addr
[
0
],
self
.
channel
.
server
.
logger
.
log
(
' - - [%s] "%s" %d %d "%s" "%s"
\
n
'
%
(
self
.
channel
.
addr
[
0
],
# self.channel.addr[1],
' - - [%s] "%s" %d %d "%s" "%s"
\
n
'
%
(
self
.
log_date_string
(
time
.
time
()),
# self.channel.addr[1],
self
.
request
,
self
.
log_date_string
(
time
.
time
()),
self
.
reply_code
,
self
.
request
,
bytes
,
self
.
reply_code
,
referer
,
bytes
,
user_agent
referer
,
)
user_agent
)
)
)
responses
=
{
100
:
"Continue"
,
responses
=
{
101
:
"Switching Protocols"
,
100
:
"Continue"
,
200
:
"OK"
,
101
:
"Switching Protocols"
,
201
:
"Created"
,
200
:
"OK"
,
202
:
"Accepted"
,
201
:
"Created"
,
203
:
"Non-Authoritative Information"
,
202
:
"Accepted"
,
204
:
"No Content"
,
203
:
"Non-Authoritative Information"
,
205
:
"Reset Content"
,
204
:
"No Content"
,
206
:
"Partial Content"
,
205
:
"Reset Content"
,
300
:
"Multiple Choices"
,
206
:
"Partial Content"
,
301
:
"Moved Permanently"
,
300
:
"Multiple Choices"
,
302
:
"Moved Temporarily"
,
301
:
"Moved Permanently"
,
303
:
"See Other"
,
302
:
"Moved Temporarily"
,
304
:
"Not Modified"
,
303
:
"See Other"
,
305
:
"Use Proxy"
,
304
:
"Not Modified"
,
400
:
"Bad Request"
,
305
:
"Use Proxy"
,
401
:
"Unauthorized"
,
400
:
"Bad Request"
,
402
:
"Payment Required"
,
401
:
"Unauthorized"
,
403
:
"Forbidden"
,
402
:
"Payment Required"
,
404
:
"Not Found"
,
403
:
"Forbidden"
,
405
:
"Method Not Allowed"
,
404
:
"Not Found"
,
406
:
"Not Acceptable"
,
405
:
"Method Not Allowed"
,
407
:
"Proxy Authentication Required"
,
406
:
"Not Acceptable"
,
408
:
"Request Time-out"
,
407
:
"Proxy Authentication Required"
,
409
:
"Conflict"
,
408
:
"Request Time-out"
,
410
:
"Gone"
,
409
:
"Conflict"
,
411
:
"Length Required"
,
410
:
"Gone"
,
412
:
"Precondition Failed"
,
411
:
"Length Required"
,
413
:
"Request Entity Too Large"
,
412
:
"Precondition Failed"
,
414
:
"Request-URI Too Large"
,
413
:
"Request Entity Too Large"
,
415
:
"Unsupported Media Type"
,
414
:
"Request-URI Too Large"
,
500
:
"Internal Server Error"
,
415
:
"Unsupported Media Type"
,
501
:
"Not Implemented"
,
500
:
"Internal Server Error"
,
502
:
"Bad Gateway"
,
501
:
"Not Implemented"
,
503
:
"Service Unavailable"
,
502
:
"Bad Gateway"
,
504
:
"Gateway Time-out"
,
503
:
"Service Unavailable"
,
505
:
"HTTP Version not supported"
504
:
"Gateway Time-out"
,
}
505
:
"HTTP Version not supported"
}
# Default error message
DEFAULT_ERROR_MESSAGE
=
string
.
join
(
# Default error message
[
'<head>'
,
DEFAULT_ERROR_MESSAGE
=
string
.
join
(
'<title>Error response</title>'
,
[
'<head>'
,
'</head>'
,
'<title>Error response</title>'
,
'<body>'
,
'</head>'
,
'<h1>Error response</h1>'
,
'<body>'
,
'<p>Error code %(code)d.'
,
'<h1>Error response</h1>'
,
'<p>Message: %(message)s.'
,
'<p>Error code %(code)d.'
,
'</body>'
,
'<p>Message: %(message)s.'
,
''
'</body>'
,
],
''
'
\
r
\
n
'
],
)
'
\
r
\
n
'
)
# ===========================================================================
# ===========================================================================
#
HTTP Channel Object
#
HTTP Channel Object
# ===========================================================================
# ===========================================================================
class http_channel (asynchat.async_chat):

    # use a larger default output buffer
    ac_out_buffer_size = 1 << 16

    current_request = None
    channel_counter = counter()

    def __init__ (self, server, conn, addr):
        self.channel_number = http_channel.channel_counter.increment()
        self.request_counter = counter()
        asynchat.async_chat.__init__ (self, conn)
        self.server = server
        self.addr = addr
        self.set_terminator ('\r\n\r\n')
        self.in_buffer = ''
        self.creation_time = int (time.time())
        self.check_maintenance()

    def __repr__ (self):
        ar = asynchat.async_chat.__repr__(self)[1:-1]
        return '<%s channel#: %s requests:%s>' % (
            ar,
            self.channel_number,
            self.request_counter
            )
    # Channel Counter, Maintenance Interval...
    maintenance_interval = 500

    def check_maintenance (self):
        if not self.channel_number % self.maintenance_interval:
            self.maintenance()

    def maintenance (self):
        self.kill_zombies()

    # 30-minute zombie timeout.  status_handler also knows how to kill zombies.
    zombie_timeout = 30 * 60

    def kill_zombies (self):
        now = int (time.time())
        for channel in asyncore.socket_map.values():
            if channel.__class__ == self.__class__:
                if (now - channel.creation_time) > channel.zombie_timeout:
                    channel.close()
    # --------------------------------------------------
    # send/recv overrides, good place for instrumentation.
    # --------------------------------------------------

    # this information needs to get into the request object,
    # so that it may log correctly.
    def send (self, data):
        result = asynchat.async_chat.send (self, data)
        self.server.bytes_out.increment (len(data))
        return result

    def recv (self, buffer_size):
        try:
            result = asynchat.async_chat.recv (self, buffer_size)
            self.server.bytes_in.increment (len(result))
            return result
        except MemoryError:
            # --- Save a Trip to Your Service Provider ---
            # It's possible for a process to eat up all the memory of
            # the machine, and put it in an extremely wedged state,
            # where medusa keeps running and can't be shut down.  This
            # is where MemoryError tends to get thrown, though of
            # course it could get thrown elsewhere.
            sys.exit ("Out of Memory!")
    def handle_error (self):
        t, v = sys.exc_info()[:2]
        if t is SystemExit:
            raise t, v
        else:
            asynchat.async_chat.handle_error (self)

    def log (self, *args):
        pass
    # --------------------------------------------------
    # async_chat methods
    # --------------------------------------------------

    def collect_incoming_data (self, data):
        if self.current_request:
            # we are receiving data (probably POST data) for a request
            self.current_request.collect_incoming_data (data)
        else:
            # we are receiving header (request) data
            self.in_buffer = self.in_buffer + data

    def found_terminator (self):
        if self.current_request:
            self.current_request.found_terminator()
        else:
            header = self.in_buffer
            self.in_buffer = ''

            lines = string.split (header, '\r\n')

            # --------------------------------------------------
            # crack the request header
            # --------------------------------------------------

            while lines and not lines[0]:
                # as per the suggestion of http-1.1 section 4.1, (and
                # Eric Parker <eparker@zyvex.com>), ignore leading
                # blank lines (buggy browsers tack them onto the end of
                # POST requests)
                lines = lines[1:]

            if not lines:
                self.close_when_done()
                return

            request = lines[0]

            command, uri, version = crack_request (request)
            header = join_headers (lines[1:])

            r = http_request (self, request, command, uri, version, header)
            self.request_counter.increment()
            self.server.total_requests.increment()

            if command is None:
                self.log_info ('Bad HTTP request: %s' % request, 'error')
                r.error (400)
                return

            # --------------------------------------------------
            # handler selection and dispatch
            # --------------------------------------------------
            for h in self.server.handlers:
                if h.match (r):
                    try:
                        self.current_request = r
                        # This isn't used anywhere.
                        # r.handler = h # CYCLE
                        h.handle_request (r)
                    except:
                        self.server.exceptions.increment()
                        (file, fun, line), t, v, tbinfo = asyncore.compact_traceback()
                        self.log_info (
                            'Server Error: %s, %s: file: %s line: %s' % (t, v, file, line),
                            'error')
                        try:
                            r.error (500)
                        except:
                            pass
                    return

            # no handlers, so complain
            r.error (404)
    def writable (self):
        # this is just the normal async_chat 'writable', here for comparison
        return self.ac_out_buffer or len(self.producer_fifo)

    def writable_for_proxy (self):
        # this version of writable supports the idea of a 'stalled' producer
        # [i.e., it's not ready to produce any output yet] This is needed by
        # the proxy, which will be waiting for the magic combination of
        # 1) hostname resolved
        # 2) connection made
        # 3) data available.
        if self.ac_out_buffer:
            return 1
        elif len(self.producer_fifo):
            p = self.producer_fifo.first()
            if hasattr (p, 'stalled'):
                return not p.stalled()
            else:
                return 1
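    # A minimal sketch (not part of Medusa) of a producer that the check above
    # would treat as stalled until its payload arrives:
    #
    #   class deferred_producer:
    #       def __init__ (self):
    #           self.data = None        # filled in later, e.g. once a lookup finishes
    #       def stalled (self):
    #           return self.data is None
    #       def more (self):
    #           return self.data or ''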
# ===========================================================================
#                           HTTP Server Object
# ===========================================================================
class http_server (asyncore.dispatcher):

    SERVER_IDENT = 'HTTP Server (V%s)' % VERSION_STRING

    channel_class = http_channel

    def __init__ (self, ip, port, resolver=None, logger_object=None):
        self.ip = ip
        self.port = port
        asyncore.dispatcher.__init__ (self)
        self.create_socket (socket.AF_INET, socket.SOCK_STREAM)

        self.handlers = []

        if not logger_object:
            logger_object = logger.file_logger (sys.stdout)

        self.set_reuse_addr()
        self.bind ((ip, port))

        # lower this to 5 if your OS complains
        self.listen (1024)

        host, port = self.socket.getsockname()

        if not ip:
            self.log_info ('Computing default hostname', 'warning')
            ip = socket.gethostbyname (socket.gethostname())
        try:
            self.server_name = socket.gethostbyaddr (ip)[0]
        except socket.error:
            self.log_info ('Cannot do reverse lookup', 'warning')
            self.server_name = ip       # use the IP address as the "hostname"

        self.server_port = port
        self.total_clients = counter()
        self.total_requests = counter()
        self.exceptions = counter()
        self.bytes_out = counter()
        self.bytes_in = counter()

        if not logger_object:
            logger_object = logger.file_logger (sys.stdout)

        if resolver:
            self.logger = logger.resolving_logger (resolver, logger_object)
        else:
            self.logger = logger.unresolving_logger (logger_object)

        self.log_info (
            'Medusa (V%s) started at %s'
            '\n\tHostname: %s'
            '\n\tPort:%d'
            '\n' % (
                VERSION_STRING,
                time.ctime(time.time()),
                self.server_name,
                port,
                )
            )
    def writable (self):
        return 0

    def handle_read (self):
        pass

    def readable (self):
        return self.accepting

    def handle_connect (self):
        pass

    def handle_accept (self):
        self.total_clients.increment()
        try:
            conn, addr = self.accept()
        except socket.error:
            # linux: on rare occasions we get a bogus socket back from
            # accept.  socketmodule.c:makesockaddr complains that the
            # address family is unknown.  We don't want the whole server
            # to shut down because of this.
            self.log_info ('warning: server accept() threw an exception', 'warning')
            return
        except TypeError:
            # unpack non-sequence.  this can happen when a read event
            # fires on a listening socket, but when we call accept()
            # we get EWOULDBLOCK, so dispatcher.accept() returns None.
            # Seen on FreeBSD3.
            self.log_info ('warning: server accept() threw EWOULDBLOCK', 'warning')
            return

        self.channel_class (self, conn, addr)
    def install_handler (self, handler, back=0):
        if back:
            self.handlers.append (handler)
        else:
            self.handlers.insert (0, handler)

    def remove_handler (self, handler):
        self.handlers.remove (handler)
    def status (self):
        def nice_bytes (n):
            return string.join (status_handler.english_bytes (n))

        handler_stats = filter (None, map (maybe_status, self.handlers))

        if self.total_clients:
            ratio = self.total_requests.as_long() / float(self.total_clients.as_long())
        else:
            ratio = 0.0

        return producers.composite_producer (
            fifo ([producers.lines_producer (
                ['<h2>%s</h2>' % self.SERVER_IDENT,
                 '<br>Listening on: <b>Host:</b> %s' % self.server_name,
                 '<b>Port:</b> %d' % self.port,
                 '<p><ul>'
                 '<li>Total <b>Clients:</b> %s' % self.total_clients,
                 '<b>Requests:</b> %s' % self.total_requests,
                 '<b>Requests/Client:</b> %.1f' % (ratio),
                 '<li>Total <b>Bytes In:</b> %s' % (nice_bytes (self.bytes_in.as_long())),
                 '<b>Bytes Out:</b> %s' % (nice_bytes (self.bytes_out.as_long())),
                 '<li>Total <b>Exceptions:</b> %s' % self.exceptions,
                 '</ul><p>'
                 '<b>Extension List</b><ul>',
                 ])] + handler_stats + [producers.simple_producer('</ul>')]
                )
            )
def maybe_status (thing):
    if hasattr (thing, 'status'):
        return thing.status()
    else:
        return None
CONNECTION = regex.compile ('Connection: \(.*\)', regex.casefold)
# merge multi-line headers
# [486dx2: ~500/sec]
def join_headers (headers):
    r = []
    for i in range(len(headers)):
        if headers[i][0] in ' \t':
            r[-1] = r[-1] + headers[i][1:]
        else:
            r.append (headers[i])
    return r
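# A quick illustration (not in the original source) of the continuation-line
# merging done above; a line starting with a space or tab is glued onto the
# previous header with its first character dropped:
#
#   join_headers (['Via: 1.0 fred,', '\t 1.1 example.com'])
#       -> ['Via: 1.0 fred, 1.1 example.com']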
def get_header (head_reg, lines, group=1):
    for line in lines:
        if head_reg.match (line) == len(line):
            return head_reg.group (group)
    return ''
REQUEST = re.compile ('([^ ]+) (?:[^ :?#]+://[^ ?#/]*)?([^ ]+)(( HTTP/([0-9.]+))$|$)')
...

@@ -683,91 +684,91 @@ def crack_request (r):
    if m is not None:
        return string.lower (m.group (1)), m.group (2), m.group (5)
    else:
        return None, None, None
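# Roughly what the cracking above produces (illustrative request lines):
#
#   crack_request ('GET /index.html HTTP/1.0')
#       -> ('get', '/index.html', '1.0')
#   crack_request ('GET http://example.com/index.html HTTP/1.0')
#       -> ('get', '/index.html', '1.0')    # scheme and host stripped by REQUEST
#   crack_request ('garbage')
#       -> (None, None, None)               # caller treats this as a bad request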
class fifo:
    def __init__ (self, list=None):
        if not list:
            self.list = []
        else:
            self.list = list

    def __len__ (self):
        return len(self.list)

    def first (self):
        return self.list[0]

    def push_front (self, object):
        self.list.insert (0, object)

    def push (self, data):
        self.list.append (data)

    def pop (self):
        if self.list:
            result = self.list[0]
            del self.list[0]
            return (1, result)
        else:
            return (0, None)
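# Usage sketch for the fifo protocol above (values are illustrative):
#
#   f = fifo()
#   f.push ('a')
#   f.push_front ('b')
#   f.pop()     # -> (1, 'b')
#   f.pop()     # -> (1, 'a')
#   f.pop()     # -> (0, None)   an empty fifo signals failure via the flag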
def compute_timezone_for_log ():
    if time.daylight:
        tz = time.altzone
    else:
        tz = time.timezone
    if tz > 0:
        neg = 1
    else:
        neg = 0
        tz = -tz
    h, rem = divmod (tz, 3600)
    m, rem = divmod (rem, 60)
    if neg:
        return '-%02d%02d' % (h, m)
    else:
        return '+%02d%02d' % (h, m)

# if you run this program over a TZ change boundary, this will be invalid.
tz_for_log = compute_timezone_for_log()
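# For example, with an offset of 18000 seconds west of UTC the function above
# returns '-0500', and with 3600 seconds east of UTC it returns '+0100'
# (time.timezone/time.altzone are positive west of Greenwich).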
if __name__ == '__main__':
    import sys
    if len(sys.argv) < 2:
        print 'usage: %s <root> <port>' % (sys.argv[0])
    else:
        import monitor
        import filesys
        import default_handler
        import status_handler
        import ftp_server
        import chat_server
        import resolver
        import logger
        rs = resolver.caching_resolver ('127.0.0.1')
        lg = logger.file_logger (sys.stdout)
        ms = monitor.secure_monitor_server ('fnord', '127.0.0.1', 9999)
        fs = filesys.os_filesystem (sys.argv[1])
        dh = default_handler.default_handler (fs)
        hs = http_server ('', string.atoi (sys.argv[2]), rs, lg)
        hs.install_handler (dh)
        ftp = ftp_server.ftp_server (
            ftp_server.dummy_authorizer(sys.argv[1]),
            port=8021,
            resolver=rs,
            logger_object=lg
            )
        cs = chat_server.chat_server ('', 7777)
        sh = status_handler.status_extension([hs, ms, ftp, cs, rs])
        hs.install_handler (sh)
        if '-p' in sys.argv:
            def profile_loop ():
                try:
                    asyncore.loop()
                except KeyboardInterrupt:
                    pass
            import profile
            profile.run ('profile_loop()', 'profile.out')
        else:
            asyncore.loop()
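# To exercise the demo setup above from a shell (paths are illustrative):
#
#   python http_server.py /home/medusa/htdocs 8080
#
# Append -p to the arguments to run the event loop under the profiler and
# write the results to 'profile.out'.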