fix REQUEST_METHOD, add tomfoolery tests

Neale Pickett 2012-03-08 15:29:04 -07:00
parent 71112ac2a7
commit 1394cb3e87
3 changed files with 68 additions and 29 deletions

eris.c (11 changes)

@@ -520,7 +520,7 @@ handle_request()
     if (docgi) {
         p[-2] = 0;
-        setenv("REQUEST_METHOD", p, 1);
+        setenv("REQUEST_METHOD", request, 1);
     }

     /* Interpret path into fspath. */
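
The fix above exports the saved request pointer rather than the roving parse pointer p, which by this point in handle_request() no longer marks the start of the method string. A minimal standalone sketch of the failure mode (the names and parsing here are hypothetical; the actual request-line handling in eris.c differs):

#define _POSIX_C_SOURCE 200809L
#include <stdio.h>
#include <stdlib.h>
#include <string.h>

int
main(void)
{
    char  line[] = "GET /index.html HTTP/1.0";
    char *request = line;           /* start of the method token */
    char *p = strchr(line, ' ');    /* parse pointer that keeps moving */

    if (! p) {
        return 1;
    }
    *p++ = 0;                       /* terminate the method; p is now past it */

    /* Here p points at "/index.html HTTP/1.0", so setenv("REQUEST_METHOD", p, 1)
     * would export the wrong string.  Use the saved pointer instead. */
    setenv("REQUEST_METHOD", request, 1);
    printf("REQUEST_METHOD=%s rest=%s\n", getenv("REQUEST_METHOD"), p);
    return 0;
}

The new REQUEST_METHOD test in test.sh pins this down by grepping the CGI environment for REQUEST_METHOD=GET.
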
@@ -593,8 +593,10 @@ handle_request()
     /* Read header fields */
     {
         char *base = buf;
+        char *lastchar = base + (sizeof buf) - 2;
         int nheaders = 0;

+        *lastchar = 0;
         while (1) {
             char *cgi_name = base;
             char *p;
@@ -616,18 +618,21 @@ handle_request()
             if (NULL == fgets(p, plen, stdin)) {
                 badrequest(500, "OS Error", "OS error reading headers");
             }
+            if (*lastchar) {
+                badrequest(431, "Request Header Too Large", "An HTTP header field was too large");
+            }

             len = extract_header_field(p, &val, 1);
             if (! len) {
                 /* blank line */
                 break;
             }
-            name = p;

             if (! val) {
                 badrequest(400, "Invalid header", "Unable to parse header block");
             }
+            name = p;

             /* Set up CGI environment variables */
             if (docgi) {
                 setenv(cgi_name, val, 1);
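
The new 431 path above rests on a sentinel byte: lastchar is the last byte fgets() can fill with data, it is zeroed before the headers are read, and if it is ever non-zero the header data must have run all the way to the end of the buffer. A standalone, single-read sketch of the technique (the buffer size and messages are illustrative; eris actually reads successive header lines into one buffer through a moving pointer):

#include <stdio.h>
#include <string.h>

#define BUFSIZE 8192    /* illustrative size, not eris's actual buffer size */

int
main(void)
{
    char  buf[BUFSIZE];
    char *lastchar = buf + (sizeof buf) - 2;    /* last byte fgets can fill with data */

    *lastchar = 0;      /* sentinel: stays 0 unless the line reaches the end of buf */
    if (NULL == fgets(buf, sizeof buf, stdin)) {
        fputs("read error\n", stderr);
        return 1;
    }
    if (*lastchar) {
        /* fgets wrote into the sentinel byte, so the header line was too long */
        fputs("431 Request Header Too Large\n", stdout);
        return 1;
    }
    printf("ok, %zu bytes\n", strlen(buf));
    return 0;
}

The new "Huge header field" and "Too many headers" tests in test.sh both expect this 431 response.
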


@@ -49,7 +49,7 @@ extract_header_field(char *buf, char **val, int cgi)
         if (! *val) {
             if (buf[len] == '\n') {
                 /* Blank line or incorrectly-formatted header */
-                return 0;
+                break;
             } else if (buf[len] == ':') {
                 buf[len] = 0;
                 for (*val = &(buf[len+1]); **val == ' '; *val += 1);
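
With the one-line change above, a header line that has no colon no longer looks like the end of the headers: a blank line still produces length 0, while a malformed line now reaches the caller with a non-zero length and *val still NULL, and eris.c answers it with a 400 (exercised by the new "Non-header" test). A simplified splitter with the same return contract, for illustration only (it ignores the real function's cgi argument):

#include <stdio.h>
#include <string.h>

/* Returns 0 on a blank line (end of headers).  Otherwise returns the length
 * of the field name; *val points at the value, or stays NULL when the line
 * has no ':' so the caller can reject it as malformed. */
static size_t
split_header(char *line, char **val)
{
    size_t len;

    *val = NULL;
    if ((line[0] == '\r') || (line[0] == '\n')) {
        return 0;                        /* blank line */
    }
    line[strcspn(line, "\r\n")] = 0;     /* strip the line terminator */
    len = strcspn(line, ":");
    if (! line[len]) {
        return len;                      /* no colon: len > 0, *val == NULL */
    }
    line[len] = 0;
    for (*val = line + len + 1; **val == ' '; *val += 1);   /* skip leading spaces */
    return len;
}

int
main(void)
{
    char   good[] = "Host: example.com\r\n";
    char   bad[]  = "tomfoolery\r\n";
    char   end[]  = "\r\n";
    char  *val;
    size_t len;

    len = split_header(good, &val);
    printf("%zu %s\n", len, val);            /* prints: 4 example.com */

    len = split_header(bad, &val);
    printf("%zu %d\n", len, val == NULL);    /* prints: 10 1 */

    len = split_header(end, &val);
    printf("%zu\n", len);                    /* prints: 0 */
    return 0;
}
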

test.sh (84 changes)

@@ -6,7 +6,7 @@
 H () {
     section="$*"
-    printf "\n%-20s: " "$*"
+    printf "\n%-20s " "$*"
 }

 title() {
@@ -31,29 +31,34 @@ d () {
 }

+###
+### Make web space
+###
 mkdir -p default
 echo james > default/index.html
 touch default/a
 cat <<'EOD' > default/a.cgi
 #! /bin/sh
 echo 'Content-type: text/plain'
+ls / > /dev/null # delay a little
 echo
-if [ -n "$CONTENT_LENGTH" ]; then
-    echo "t:$CONTENT_TYPE"
-    echo -n "v:"
-    dd bs=1 count=$CONTENT_LENGTH 2>/dev/null
-else
-    echo ${QUERY_STRING:-james}
-fi
+set | sort
 EOD
 chmod +x default/a.cgi
 mkdir -p default/empty
+mkdir -p default/subdir
+touch default/subdir/a
+touch default/subdir/.hidden

+###
+###
+###
 echo "HTTPD: $HTTPD "
 echo "CGI: $HTTPD_CGI "
 echo "IDX: $HTTPD_IDX "

 H "Basic tests"

 title "GET /index.html"
@@ -86,32 +94,42 @@ title "Logging /index.html"
  PROTO=TCP TCPREMOTEPORT=1234 TCPREMOTEIP=10.0.0.2 $HTTPD >/dev/null) 2>&1 | grep -q '^10.0.0.2 200 6 host (null) (null) /index.html$' && pass || fail

-H "High weirdness"
-# "Huge header"
-# "Huge header across MAXHEADERLEN"
-# "Too many headers"
+H "Tomfoolery"
+
+title "Non-header"
+printf 'GET / HTTP/1.0\r\na: b\r\nfoo\r\n\r\n' | $HTTPD 2>/dev/null | grep -q 'HTTP/1.. 400 ' && pass || fail
+
+title "Huge header field"
+(printf 'GET / HTTP/1.0\r\nHeader: '
+dd if=/dev/zero bs=1k count=9 2>/dev/null | tr '\0' '.'
+printf '\r\n\r\n') | $HTTPD 2>/dev/null | grep -q 'HTTP/1.. 431 ' && pass || fail
+
+title "Too many headers"
+(printf 'GET / HTTP/1.0\r\n'
+for i in $(seq 500); do
+    printf 'Header: val\r\n'
+done
+printf '\r\n') | $HTTPD 2>/dev/null | grep -q 'HTTP/1.. 431 ' && pass || fail

 H "If-Modified-Since"

 title "Has been modified"
-printf 'GET / HTTP/1.0\r\nIf-Modified-Since: Sun, 27 Feb 1980 12:12:12 GMT\r\n\r\n' | $HTTPD 2>/dev/null | grep -q '200 OK' && pass || fail
+printf 'GET / HTTP/1.0\r\nIf-Modified-Since: Sun, 27 Feb 1980 12:12:12 GMT\r\n\r\n' | $HTTPD 2>/dev/null | grep -q 'HTTP/1.. 200 ' && pass || fail

 title "RFC 822 Date"
-printf 'GET / HTTP/1.0\r\nIf-Modified-Since: Sun, 27 Feb 2030 12:12:12 GMT\r\n\r\n' | $HTTPD 2>/dev/null | grep -q '304 Not Changed' && pass || fail
+printf 'GET / HTTP/1.0\r\nIf-Modified-Since: Sun, 27 Feb 2030 12:12:12 GMT\r\n\r\n' | $HTTPD 2>/dev/null | grep -q 'HTTP/1.. 304 ' && pass || fail

 title "RFC 850 Date"
-printf 'GET / HTTP/1.0\r\nIf-Modified-Since: Sunday, 27-Feb-30 12:12:12 GMT\r\n\r\n' | $HTTPD 2>/dev/null | grep -q '304 Not Changed' && pass || fail
+printf 'GET / HTTP/1.0\r\nIf-Modified-Since: Sunday, 27-Feb-30 12:12:12 GMT\r\n\r\n' | $HTTPD 2>/dev/null | grep -q 'HTTP/1.. 304 ' && pass || fail

 title "RFC 850 Thursday"
-printf 'GET / HTTP/1.0\r\nIf-Modified-Since: Thursday, 27-Feb-30 12:12:12 GMT\r\n\r\n' | $HTTPD 2>/dev/null | grep -q '304 Not Changed' && pass || fail
+printf 'GET / HTTP/1.0\r\nIf-Modified-Since: Thursday, 27-Feb-30 12:12:12 GMT\r\n\r\n' | $HTTPD 2>/dev/null | grep -q 'HTTP/1.. 304 ' && pass || fail

 title "ANSI C Date"
-printf 'GET / HTTP/1.0\r\nIf-Modified-Since: Sun Feb 27 12:12:12 2030\r\n\r\n' | $HTTPD 2>/dev/null | grep -q '304 Not Changed' && pass || fail
-
-title "No trailing slash"
-printf 'GET /empty HTTP/1.0\r\n\r\n' | $HTTPD 2>/dev/null | d | grep -q '301 Redirect#%.*Location: /empty/#%' && pass || fail
+printf 'GET / HTTP/1.0\r\nIf-Modified-Since: Sun Feb 27 12:12:12 2030\r\n\r\n' | $HTTPD 2>/dev/null | grep -q 'HTTP/1.. 304 ' && pass || fail
@@ -120,20 +138,36 @@ H "Directory indexing"
 title "Basic index"
 printf 'GET /empty/ HTTP/1.0\r\n\r\n' | $HTTPD_IDX 2>/dev/null | d | grep -Fq '<h1>Directory Listing: /empty/</h1><pre><a href="../">Parent Directory</a>%</pre>' && pass || fail

+title "Hidden file"
+printf 'GET /subdir/ HTTP/1.0\r\n\r\n' | $HTTPD_IDX 2>/dev/null | grep -q 'hidden' && fail || pass
+
 H "CGI"

 title "Basic CGI"
-printf 'GET /a.cgi HTTP/1.0\r\n\r\n' | $HTTPD_CGI 2>/dev/null | d | grep -q 'HTTP/1.0 200 OK#%Server: .*#%Connection: close#%Pragma: no-cache#%Content-type: text/plain#%#%james%' && pass || fail
+printf 'GET /a.cgi HTTP/1.0\r\n\r\n' | \
+    $HTTPD_CGI 2>/dev/null | d | grep -Eq 'HTTP/1.0 200 OK#%Server: .*#%Connection: close#%Pragma: no-cache#%Content-type: text/plain#%#%.*%GATEWAY_INTERFACE=.?CGI/1.1.?%' && pass || fail
+
+title "REQUEST_METHOD"
+printf 'GET /a.cgi HTTP/1.0\r\n\r\n' | \
+    $HTTPD_CGI 2>/dev/null | grep -Eq 'REQUEST_METHOD=.?GET.?$' && pass || fail

 title "GET with arguments"
-printf 'GET /a.cgi?foo HTTP/1.0\r\n\r\n' | $HTTPD_CGI 2>/dev/null | d | grep -q 'HTTP/1.0 200 OK#%Server: .*#%Connection: close#%Pragma: no-cache#%Content-type: text/plain#%#%foo%' && pass || fail
+printf 'GET /a.cgi?foo HTTP/1.0\r\n\r\n' | \
+    $HTTPD_CGI 2>/dev/null | grep -Eq 'QUERY_STRING=.?foo.?$' && pass || fail
+
+title "GET with complex args"
+printf 'GET /a.cgi?t=New+Mexico+Land+Of+Enchantment&s=LG8+LV32+R4+G32+LG32+Y4+LG4 HTTP/1.0\r\n\r\n' | \
+    $HTTPD_CGI 2>/dev/null | d | grep -Fq 't=New+Mexico' && pass || fail

 title "POST"
-printf 'POST /a.cgi HTTP/1.0\r\nContent-Type: moo\r\nContent-Length: 3\r\n\r\narf' | $HTTPD_CGI 2>/dev/null | d | grep -q 't:moo%v:arf$' && pass || fail
+printf 'POST /a.cgi HTTP/1.0\r\nContent-Type: moo\r\nContent-Length: 3\r\n\r\narf' | \
+    $HTTPD_CGI 2>/dev/null | d | grep -Eq '%CONTENT_LENGTH=.?3.?%CONTENT_TYPE=.?moo.?%' && pass || fail

 title "PATH_INFO"
-printf 'GET /a.cgi/merf HTTP/1.0\r\n\r\n' | $HTTPD_CGI 2>/dev/null | grep -q '200' && pass || fail
+printf 'GET /a.cgi/merf HTTP/1.0\r\n\r\n' | $HTTPD_CGI 2>/dev/null | grep -q '^PATH_INFO=/merf$' && pass || fail

 H "fnord bugs"