Diffstat (limited to 'netlib/http/http1')
-rw-r--r--  netlib/http/http1/__init__.py   23
-rw-r--r--  netlib/http/http1/assemble.py  105
-rw-r--r--  netlib/http/http1/protocol.py  586
-rw-r--r--  netlib/http/http1/read.py      346
4 files changed, 473 insertions, 587 deletions
diff --git a/netlib/http/http1/__init__.py b/netlib/http/http1/__init__.py
index 6b5043af..4d223f97 100644
--- a/netlib/http/http1/__init__.py
+++ b/netlib/http/http1/__init__.py
@@ -1 +1,22 @@
-from protocol import *
+from .read import (
+    read_request, read_request_head,
+    read_response, read_response_head,
+    read_message_body, read_message_body_chunked,
+    connection_close,
+    expected_http_body_size,
+)
+from .assemble import (
+    assemble_request, assemble_request_head,
+    assemble_response, assemble_response_head,
+)
+
+
+__all__ = [
+    "read_request", "read_request_head",
+    "read_response", "read_response_head",
+    "read_message_body", "read_message_body_chunked",
+    "connection_close",
+    "expected_http_body_size",
+    "assemble_request", "assemble_request_head",
+    "assemble_response", "assemble_response_head",
+]
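
The package now exposes plain functions in place of the old HTTP1Protocol class. A minimal usage sketch, not part of this commit, assuming the package is importable as netlib.http.http1 and using io.BytesIO as a stand-in for a connection's read file:

import io
from netlib.http import http1

rfile = io.BytesIO(b"GET /index.html HTTP/1.1\r\nHost: example.com\r\n\r\n")
request = http1.read_request(rfile)
# request.method == b"GET", request.path == b"/index.html",
# request.headers holds the parsed Headers and, with no Content-Length
# or chunked encoding, request.body == b"".
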
diff --git a/netlib/http/http1/assemble.py b/netlib/http/http1/assemble.py
new file mode 100644
index 00000000..a3269eed
--- /dev/null
+++ b/netlib/http/http1/assemble.py
@@ -0,0 +1,105 @@
+from __future__ import absolute_import, print_function, division
+
+from ... import utils
+from ...exceptions import HttpException
+from .. import CONTENT_MISSING
+
+
+def assemble_request(request):
+    if request.body == CONTENT_MISSING:
+        raise HttpException("Cannot assemble flow with CONTENT_MISSING")
+    head = assemble_request_head(request)
+    return head + request.body
+
+
+def assemble_request_head(request):
+    first_line = _assemble_request_line(request)
+    headers = _assemble_request_headers(request)
+    return b"%s\r\n%s\r\n" % (first_line, headers)
+
+
+def assemble_response(response):
+    if response.body == CONTENT_MISSING:
+        raise HttpException("Cannot assemble flow with CONTENT_MISSING")
+    head = assemble_response_head(response)
+    return head + response.body
+
+
+def assemble_response_head(response):
+    first_line = _assemble_response_line(response)
+    headers = _assemble_response_headers(response)
+    return b"%s\r\n%s\r\n" % (first_line, headers)
+
+
+
+
+def _assemble_request_line(request, form=None):
+    if form is None:
+        form = request.form_out
+    if form == "relative":
+        return b"%s %s %s" % (
+            request.method,
+            request.path,
+            request.httpversion
+        )
+    elif form == "authority":
+        return b"%s %s:%d %s" % (
+            request.method,
+            request.host,
+            request.port,
+            request.httpversion
+        )
+    elif form == "absolute":
+        return b"%s %s://%s:%s%s %s" % (
+            request.method,
+            request.scheme,
+            request.host,
+            request.port,
+            request.path,
+            request.httpversion
+        )
+    else:  # pragma: nocover
+        raise RuntimeError("Invalid request form")
+
+
+def _assemble_request_headers(request):
+    headers = request.headers.copy()
+    for k in request._headers_to_strip_off:
+        headers.pop(k, None)
+    if b"host" not in headers and request.scheme and request.host and request.port:
+        headers[b"Host"] = utils.hostport(
+            request.scheme,
+            request.host,
+            request.port
+        )
+
+    # If content is defined (i.e. not None or CONTENT_MISSING), we always
+    # add a content-length header.
+    if request.body or request.body == b"":
+        headers[b"Content-Length"] = str(len(request.body)).encode("ascii")
+
+    return str(headers)
+
+
+def _assemble_response_line(response):
+    return b"%s %s %s" % (
+        response.httpversion,
+        response.status_code,
+        response.msg,
+    )
+
+
+def _assemble_response_headers(response, preserve_transfer_encoding=False):
+    # TODO: Remove preserve_transfer_encoding
+    headers = response.headers.copy()
+    for k in response._headers_to_strip_off:
+        headers.pop(k, None)
+    if not preserve_transfer_encoding:
+        headers.pop(b"Transfer-Encoding", None)
+
+    # If body is defined (i.e. not None or CONTENT_MISSING), we always
+    # add a content-length header.
+    if response.body or response.body == b"":
+        headers[b"Content-Length"] = str(len(response.body)).encode("ascii")
+
+    return bytes(headers)
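
A rough illustration of the assembly helpers above, not taken from the diff: assemble_request() only touches the attributes that _assemble_request_line() and _assemble_request_headers() read, so a hypothetical stub object is enough to sketch the round trip under Python 2 (where b"..." literals are plain str); the real netlib Request class is what callers would normally pass.

from netlib.http import Headers
from netlib.http.http1 import assemble_request

class StubRequest(object):
    # Attribute names mirror what the assembly helpers read; this stub is
    # illustrative only and not part of netlib.
    form_out = "relative"
    method = b"GET"
    path = b"/index.html"
    httpversion = b"HTTP/1.1"
    scheme = b"http"
    host = b"example.com"
    port = 80
    headers = Headers([[b"Host", b"example.com"]])
    body = b""
    _headers_to_strip_off = []

raw = assemble_request(StubRequest())
# raw starts with b"GET /index.html HTTP/1.1\r\n", followed by the serialized
# headers (including the Content-Length: 0 added for the empty body) and the
# blank line that terminates the head.
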
diff --git a/netlib/http/http1/protocol.py b/netlib/http/http1/protocol.py
deleted file mode 100644
index cf1dffa3..00000000
--- a/netlib/http/http1/protocol.py
+++ /dev/null
@@ -1,586 +0,0 @@
-from __future__ import (absolute_import, print_function, division)
-import string
-import sys
-import time
-
-from ... import utils, tcp, http
-from .. import semantics, Headers
-from ..exceptions import *
-
-
-class TCPHandler(object):
-
-    def __init__(self, rfile, wfile=None):
-        self.rfile = rfile
-        self.wfile = wfile
-
-
-class HTTP1Protocol(semantics.ProtocolMixin):
-
-    ALPN_PROTO_HTTP1 = 'http/1.1'
-
-    def __init__(self, tcp_handler=None, rfile=None, wfile=None):
-        self.tcp_handler = tcp_handler or TCPHandler(rfile, wfile)
-
-    def read_request(
-        self,
-        include_body=True,
-        body_size_limit=None,
-        allow_empty=False,
-    ):
-        """
-        Parse an HTTP request from a file stream
-
-        Args:
-            include_body (bool): Read response body as well
-            body_size_limit (bool): Maximum body size
-            wfile (file): If specified, HTTP Expect headers are handled
-                automatically, by writing a HTTP 100 CONTINUE response to the stream.
-
-        Returns:
-            Request: The HTTP request
-
-        Raises:
-            HttpError: If the input is invalid.
-        """
-        timestamp_start = time.time()
-        if hasattr(self.tcp_handler.rfile, "reset_timestamps"):
-            self.tcp_handler.rfile.reset_timestamps()
-
-        httpversion, host, port, scheme, method, path, headers, body = (
-            None, None, None, None, None, None, None, None)
-
-        request_line = self._get_request_line()
-        if not request_line:
-            if allow_empty:
-                return http.EmptyRequest()
-            else:
-                raise tcp.NetLibDisconnect()
-
-        request_line_parts = self._parse_init(request_line)
-        if not request_line_parts:
-            raise HttpError(
-                400,
-                "Bad HTTP request line: %s" % repr(request_line)
-            )
-        method, path, httpversion = request_line_parts
-
-        if path == '*' or path.startswith("/"):
-            form_in = "relative"
-            if not utils.isascii(path):
-                raise HttpError(
-                    400,
-                    "Bad HTTP request line: %s" % repr(request_line)
-                )
-        elif method == 'CONNECT':
-            form_in = "authority"
-            r = self._parse_init_connect(request_line)
-            if not r:
-                raise HttpError(
-                    400,
-                    "Bad HTTP request line: %s" % repr(request_line)
-                )
-            host, port, httpversion = r
-            path = None
-        else:
-            form_in = "absolute"
-            r = self._parse_init_proxy(request_line)
-            if not r:
-                raise HttpError(
-                    400,
-                    "Bad HTTP request line: %s" % repr(request_line)
-                )
-            _, scheme, host, port, path, _ = r
-
-        headers = self.read_headers()
-        if headers is None:
-            raise HttpError(400, "Invalid headers")
-
-        expect_header = headers.get("expect", "").lower()
-        if expect_header == "100-continue" and httpversion == (1, 1):
-            self.tcp_handler.wfile.write(
-                'HTTP/1.1 100 Continue\r\n'
-                '\r\n'
-            )
-            self.tcp_handler.wfile.flush()
-            del headers['expect']
-
-        if include_body:
-            body = self.read_http_body(
-                headers,
-                body_size_limit,
-                method,
-                None,
-                True
-            )
-
-        if hasattr(self.tcp_handler.rfile, "first_byte_timestamp"):
-            # more accurate timestamp_start
-            timestamp_start = self.tcp_handler.rfile.first_byte_timestamp
-
-        timestamp_end = time.time()
-
-        return http.Request(
-            form_in,
-            method,
-            scheme,
-            host,
-            port,
-            path,
-            httpversion,
-            headers,
-            body,
-            timestamp_start,
-            timestamp_end,
-        )
-
-    def read_response(
-        self,
-        request_method,
-        body_size_limit=None,
-        include_body=True,
-    ):
-        """
-        Returns an http.Response
-
-        By default, both response header and body are read.
-        If include_body=False is specified, body may be one of the
-        following:
-        - None, if the response is technically allowed to have a response body
-        - "", if the response must not have a response body (e.g. it's a
-          response to a HEAD request)
-        """
-        timestamp_start = time.time()
-        if hasattr(self.tcp_handler.rfile, "reset_timestamps"):
-            self.tcp_handler.rfile.reset_timestamps()
-
-        line = self.tcp_handler.rfile.readline()
-        # Possible leftover from previous message
-        if line == "\r\n" or line == "\n":
-            line = self.tcp_handler.rfile.readline()
-        if not line:
-            raise HttpErrorConnClosed(502, "Server disconnect.")
-        parts = self.parse_response_line(line)
-        if not parts:
-            raise HttpError(502, "Invalid server response: %s" % repr(line))
-        proto, code, msg = parts
-        httpversion = self._parse_http_protocol(proto)
-        if httpversion is None:
-            raise HttpError(502, "Invalid HTTP version in line: %s" % repr(proto))
-        headers = self.read_headers()
-        if headers is None:
-            raise HttpError(502, "Invalid headers.")
-
-        if include_body:
-            body = self.read_http_body(
-                headers,
-                body_size_limit,
-                request_method,
-                code,
-                False
-            )
-        else:
-            # if include_body==False then a None body means the body should be
-            # read separately
-            body = None
-
-        if hasattr(self.tcp_handler.rfile, "first_byte_timestamp"):
-            # more accurate timestamp_start
-            timestamp_start = self.tcp_handler.rfile.first_byte_timestamp
-
-        if include_body:
-            timestamp_end = time.time()
-        else:
-            timestamp_end = None
-
-        return http.Response(
-            httpversion,
-            code,
-            msg,
-            headers,
-            body,
-            timestamp_start=timestamp_start,
-            timestamp_end=timestamp_end,
-        )
-
-    def assemble_request(self, request):
-        assert isinstance(request, semantics.Request)
-
-        if request.body == semantics.CONTENT_MISSING:
-            raise http.HttpError(
-                502,
-                "Cannot assemble flow with CONTENT_MISSING"
-            )
-        first_line = self._assemble_request_first_line(request)
-        headers = self._assemble_request_headers(request)
-        return "%s\r\n%s\r\n%s" % (first_line, headers, request.body)
-
-    def assemble_response(self, response):
-        assert isinstance(response, semantics.Response)
-
-        if response.body == semantics.CONTENT_MISSING:
-            raise http.HttpError(
-                502,
-                "Cannot assemble flow with CONTENT_MISSING"
-            )
-        first_line = self._assemble_response_first_line(response)
-        headers = self._assemble_response_headers(response)
-        return "%s\r\n%s\r\n%s" % (first_line, headers, response.body)
-
-    def read_headers(self):
-        """
-        Read a set of headers.
-        Stop once a blank line is reached.
-
-        Return a Header object, or None if headers are invalid.
-        """
-        ret = []
-        while True:
-            line = self.tcp_handler.rfile.readline()
-            if not line or line == '\r\n' or line == '\n':
-                break
-            if line[0] in ' \t':
-                if not ret:
-                    return None
-                # continued header
-                ret[-1][1] = ret[-1][1] + '\r\n ' + line.strip()
-            else:
-                i = line.find(':')
-                # We're being liberal in what we accept, here.
-                if i > 0:
-                    name = line[:i]
-                    value = line[i + 1:].strip()
-                    ret.append([name, value])
-                else:
-                    return None
-        return Headers(ret)
-
-
-    def read_http_body(self, *args, **kwargs):
-        return "".join(self.read_http_body_chunked(*args, **kwargs))
-
-
-    def read_http_body_chunked(
-        self,
-        headers,
-        limit,
-        request_method,
-        response_code,
-        is_request,
-        max_chunk_size=None
-    ):
-        """
-        Read an HTTP message body:
-            headers: A Header object
-            limit: Size limit.
-            is_request: True if the body to read belongs to a request, False
-            otherwise
-        """
-        if max_chunk_size is None:
-            max_chunk_size = limit or sys.maxsize
-
-        expected_size = self.expected_http_body_size(
-            headers, is_request, request_method, response_code
-        )
-
-        if expected_size is None:
-            if self.has_chunked_encoding(headers):
-                # Python 3: yield from
-                for x in self._read_chunked(limit, is_request):
-                    yield x
-            else:  # pragma: nocover
-                raise HttpError(
-                    400 if is_request else 502,
-                    "Content-Length unknown but no chunked encoding"
-                )
-        elif expected_size >= 0:
-            if limit is not None and expected_size > limit:
-                raise HttpError(
-                    400 if is_request else 509,
-                    "HTTP Body too large. Limit is %s, content-length was %s" % (
-                        limit, expected_size
-                    )
-                )
-            bytes_left = expected_size
-            while bytes_left:
-                chunk_size = min(bytes_left, max_chunk_size)
-                content = self.tcp_handler.rfile.read(chunk_size)
-                yield content
-                bytes_left -= chunk_size
-        else:
-            bytes_left = limit or -1
-            while bytes_left:
-                chunk_size = min(bytes_left, max_chunk_size)
-                content = self.tcp_handler.rfile.read(chunk_size)
-                if not content:
-                    return
-                yield content
-                bytes_left -= chunk_size
-            not_done = self.tcp_handler.rfile.read(1)
-            if not_done:
-                raise HttpError(
-                    400 if is_request else 509,
-                    "HTTP Body too large. Limit is %s," % limit
-                )
-
-    @classmethod
-    def expected_http_body_size(
-        self,
-        headers,
-        is_request,
-        request_method,
-        response_code,
-    ):
-        """
-        Returns the expected body length:
-        - a positive integer, if the size is known in advance
-        - None, if the size in unknown in advance (chunked encoding or invalid
-          data)
-        - -1, if all data should be read until end of stream.
-
-        May raise HttpError.
-        """
-        # Determine response size according to
-        # http://tools.ietf.org/html/rfc7230#section-3.3
-        if request_method:
-            request_method = request_method.upper()
-
-        if (not is_request and (
-                request_method == "HEAD" or
-                (request_method == "CONNECT" and response_code == 200) or
-                response_code in [204, 304] or
-                100 <= response_code <= 199)):
-            return 0
-        if self.has_chunked_encoding(headers):
-            return None
-        if "content-length" in headers:
-            try:
-                size = int(headers["content-length"])
-                if size < 0:
-                    raise ValueError()
-                return size
-            except ValueError:
-                return None
-        if is_request:
-            return 0
-        return -1
-
-
-    @classmethod
-    def has_chunked_encoding(self, headers):
-        return "chunked" in headers.get("transfer-encoding", "").lower()
-
-
-    def _get_request_line(self):
-        """
-        Get a line, possibly preceded by a blank.
-        """
-        line = self.tcp_handler.rfile.readline()
-        if line == "\r\n" or line == "\n":
-            # Possible leftover from previous message
-            line = self.tcp_handler.rfile.readline()
-        return line
-
-    def _read_chunked(self, limit, is_request):
-        """
-        Read a chunked HTTP body.
-
-        May raise HttpError.
-        """
-        # FIXME: Should check if chunked is the final encoding in the headers
-        # http://tools.ietf.org/html/draft-ietf-httpbis-p1-messaging-16#section-3.3
-        # 3.3 2.
-        total = 0
-        code = 400 if is_request else 502
-        while True:
-            line = self.tcp_handler.rfile.readline(128)
-            if line == "":
-                raise HttpErrorConnClosed(code, "Connection closed prematurely")
-            if line != '\r\n' and line != '\n':
-                try:
-                    length = int(line, 16)
-                except ValueError:
-                    raise HttpError(
-                        code,
-                        "Invalid chunked encoding length: %s" % line
-                    )
-                total += length
-                if limit is not None and total > limit:
-                    msg = "HTTP Body too large. Limit is %s," \
-                          " chunked content longer than %s" % (limit, total)
-                    raise HttpError(code, msg)
-                chunk = self.tcp_handler.rfile.read(length)
-                suffix = self.tcp_handler.rfile.readline(5)
-                if suffix != '\r\n':
-                    raise HttpError(code, "Malformed chunked body")
-                if length == 0:
-                    return
-                yield chunk
-
-    @classmethod
-    def _parse_http_protocol(self, line):
-        """
-        Parse an HTTP protocol declaration.
-        Returns a (major, minor) tuple, or None.
-        """
-        if not line.startswith("HTTP/"):
-            return None
-        _, version = line.split('/', 1)
-        if "." not in version:
-            return None
-        major, minor = version.split('.', 1)
-        try:
-            major = int(major)
-            minor = int(minor)
-        except ValueError:
-            return None
-        return major, minor
-
-    @classmethod
-    def _parse_init(self, line):
-        try:
-            method, url, protocol = string.split(line)
-        except ValueError:
-            return None
-        httpversion = self._parse_http_protocol(protocol)
-        if not httpversion:
-            return None
-        if not utils.isascii(method):
-            return None
-        return method, url, httpversion
-
-    @classmethod
-    def _parse_init_connect(self, line):
-        """
-        Returns (host, port, httpversion) if line is a valid CONNECT line.
-        http://tools.ietf.org/html/draft-luotonen-web-proxy-tunneling-01 section 3.1
-        """
-        v = self._parse_init(line)
-        if not v:
-            return None
-        method, url, httpversion = v
-
-        if method.upper() != 'CONNECT':
-            return None
-        try:
-            host, port = url.split(":")
-        except ValueError:
-            return None
-        try:
-            port = int(port)
-        except ValueError:
-            return None
-        if not utils.is_valid_port(port):
-            return None
-        if not utils.is_valid_host(host):
-            return None
-        return host, port, httpversion
-
-    @classmethod
-    def _parse_init_proxy(self, line):
-        v = self._parse_init(line)
-        if not v:
-            return None
-        method, url, httpversion = v
-
-        parts = utils.parse_url(url)
-        if not parts:
-            return None
-        scheme, host, port, path = parts
-        return method, scheme, host, port, path, httpversion
-
-    @classmethod
-    def _parse_init_http(self, line):
-        """
-        Returns (method, url, httpversion)
-        """
-        v = self._parse_init(line)
-        if not v:
-            return None
-        method, url, httpversion = v
-        if not utils.isascii(url):
-            return None
-        if not (url.startswith("/") or url == "*"):
-            return None
-        return method, url, httpversion
-
-    @classmethod
-    def connection_close(self, httpversion, headers):
-        """
-        Checks the message to see if the client connection should be closed
-        according to RFC 2616 Section 8.1 Note that a connection should be
-        closed as well if the response has been read until end of the stream.
-        """
-        # At first, check if we have an explicit Connection header.
-        if "connection" in headers:
-            toks = utils.get_header_tokens(headers, "connection")
-            if "close" in toks:
-                return True
-            elif "keep-alive" in toks:
-                return False
-
-        # If we don't have a Connection header, HTTP 1.1 connections are assumed to
-        # be persistent
-        return httpversion != (1, 1)
-
-    @classmethod
-    def parse_response_line(self, line):
-        parts = line.strip().split(" ", 2)
-        if len(parts) == 2:  # handle missing message gracefully
-            parts.append("")
-        if len(parts) != 3:
-            return None
-        proto, code, msg = parts
-        try:
-            code = int(code)
-        except ValueError:
-            return None
-        return (proto, code, msg)
-
-    @classmethod
-    def _assemble_request_first_line(self, request):
-        return request.legacy_first_line()
-
-    def _assemble_request_headers(self, request):
-        headers = request.headers.copy()
-        for k in request._headers_to_strip_off:
-            headers.pop(k, None)
-        if 'host' not in headers and request.scheme and request.host and request.port:
-            headers["Host"] = utils.hostport(
-                request.scheme,
-                request.host,
-                request.port
-            )
-
-        # If content is defined (i.e. not None or CONTENT_MISSING), we always
-        # add a content-length header.
-        if request.body or request.body == "":
-            headers["Content-Length"] = str(len(request.body))
-
-        return str(headers)
-
-    def _assemble_response_first_line(self, response):
-        return 'HTTP/%s.%s %s %s' % (
-            response.httpversion[0],
-            response.httpversion[1],
-            response.status_code,
-            response.msg,
-        )
-
-    def _assemble_response_headers(
-        self,
-        response,
-        preserve_transfer_encoding=False,
-    ):
-        headers = response.headers.copy()
-        for k in response._headers_to_strip_off:
-            headers.pop(k, None)
-        if not preserve_transfer_encoding:
-            headers.pop('Transfer-Encoding', None)
-
-        # If body is defined (i.e. not None or CONTENT_MISSING), we always
-        # add a content-length header.
-        if response.body or response.body == "":
-            headers["Content-Length"] = str(len(response.body))
-
-        return str(headers)
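
A hedged migration sketch: the class-based entry points removed above correspond roughly to the module-level functions added in read.py and assemble.py below; keyword names follow the signatures shown in this diff.

# Old (protocol.py, removed):
#     protocol = HTTP1Protocol(rfile=rfile, wfile=wfile)
#     request = protocol.read_request(body_size_limit=limit)
#     response = protocol.read_response(request.method, body_size_limit=limit)
#     raw = protocol.assemble_request(request)
#
# New (read.py / assemble.py, added below):
#     request = read_request(rfile, body_size_limit=limit)
#     response = read_response(rfile, request, body_size_limit=limit)
#     raw = assemble_request(request)
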
diff --git a/netlib/http/http1/read.py b/netlib/http/http1/read.py
new file mode 100644
index 00000000..573bc739
--- /dev/null
+++ b/netlib/http/http1/read.py
@@ -0,0 +1,346 @@
+from __future__ import absolute_import, print_function, division
+import time
+import sys
+import re
+
+from ... import utils
+from ...exceptions import HttpReadDisconnect, HttpSyntaxException, HttpException
+from .. import Request, Response, Headers
+
+ALPN_PROTO_HTTP1 = 'http/1.1'
+
+
+def read_request(rfile, body_size_limit=None):
+    request = read_request_head(rfile)
+    request.body = read_message_body(rfile, request, limit=body_size_limit)
+    request.timestamp_end = time.time()
+    return request
+
+
+def read_request_head(rfile):
+    """
+    Parse an HTTP request head (request line + headers) from an input stream
+
+    Args:
+        rfile: The input stream
+        body_size_limit (bool): Maximum body size
+
+    Returns:
+        The HTTP request object
+
+    Raises:
+        HttpReadDisconnect: If no bytes can be read from rfile.
+        HttpSyntaxException: If the input is invalid.
+        HttpException: A different error occurred.
+    """
+    timestamp_start = time.time()
+    if hasattr(rfile, "reset_timestamps"):
+        rfile.reset_timestamps()
+
+    form, method, scheme, host, port, path, http_version = _read_request_line(rfile)
+    headers = _read_headers(rfile)
+
+    if hasattr(rfile, "first_byte_timestamp"):
+        # more accurate timestamp_start
+        timestamp_start = rfile.first_byte_timestamp
+
+    return Request(
+        form, method, scheme, host, port, path, http_version, headers, None, timestamp_start
+    )
+
+
+def read_response(rfile, request, body_size_limit=None):
+    response = read_response_head(rfile)
+    response.body = read_message_body(rfile, request, response, body_size_limit)
+    response.timestamp_end = time.time()
+    return response
+
+
+def read_response_head(rfile):
+    timestamp_start = time.time()
+    if hasattr(rfile, "reset_timestamps"):
+        rfile.reset_timestamps()
+
+    http_version, status_code, message = _read_response_line(rfile)
+    headers = _read_headers(rfile)
+
+    if hasattr(rfile, "first_byte_timestamp"):
+        # more accurate timestamp_start
+        timestamp_start = rfile.first_byte_timestamp
+
+    return Response(
+        http_version,
+        status_code,
+        message,
+        headers,
+        None,
+        timestamp_start
+    )
+
+
+def read_message_body(*args, **kwargs):
+    chunks = read_message_body_chunked(*args, **kwargs)
+    return b"".join(chunks)
+
+
+def read_message_body_chunked(rfile, request, response=None, limit=None, max_chunk_size=None):
+    """
+    Read an HTTP message body:
+
+    Args:
+        If a request body should be read, only request should be passed.
+        If a response body should be read, both request and response should be passed.
+
+    Raises:
+        HttpException
+    """
+    if not response:
+        headers = request.headers
+        response_code = None
+        is_request = True
+    else:
+        headers = response.headers
+        response_code = response.status_code
+        is_request = False
+
+    if not limit or limit < 0:
+        limit = sys.maxsize
+    if not max_chunk_size:
+        max_chunk_size = limit
+
+    expected_size = expected_http_body_size(
+        headers, is_request, request.method, response_code
+    )
+
+    if expected_size is None:
+        for x in _read_chunked(rfile, limit):
+            yield x
+    elif expected_size >= 0:
+        if limit is not None and expected_size > limit:
+            raise HttpException(
+                "HTTP Body too large. "
+                "Limit is {}, content length was advertised as {}".format(limit, expected_size)
+            )
+        bytes_left = expected_size
+        while bytes_left:
+            chunk_size = min(bytes_left, max_chunk_size)
+            content = rfile.read(chunk_size)
+            yield content
+            bytes_left -= chunk_size
+    else:
+        bytes_left = limit
+        while bytes_left:
+            chunk_size = min(bytes_left, max_chunk_size)
+            content = rfile.read(chunk_size)
+            if not content:
+                return
+            yield content
+            bytes_left -= chunk_size
+        not_done = rfile.read(1)
+        if not_done:
+            raise HttpException("HTTP body too large. Limit is {}.".format(limit))
+
+
+def connection_close(http_version, headers):
+    """
+    Checks the message to see if the client connection should be closed
+    according to RFC 2616 Section 8.1.
+    """
+    # At first, check if we have an explicit Connection header.
+    if b"connection" in headers:
+        toks = utils.get_header_tokens(headers, "connection")
+        if b"close" in toks:
+            return True
+        elif b"keep-alive" in toks:
+            return False
+
+    # If we don't have a Connection header, HTTP 1.1 connections are assumed to
+    # be persistent
+    return http_version != (1, 1)
+
+
+def expected_http_body_size(
+    headers,
+    is_request,
+    request_method,
+    response_code,
+):
+    """
+    Returns the expected body length:
+    - a positive integer, if the size is known in advance
+    - None, if the size is unknown in advance (chunked encoding)
+    - -1, if all data should be read until end of stream.
+
+    Raises:
+        HttpSyntaxException, if the content length header is invalid
+    """
+    # Determine response size according to
+    # http://tools.ietf.org/html/rfc7230#section-3.3
+    if request_method:
+        request_method = request_method.upper()
+
+    is_empty_response = (not is_request and (
+        request_method == b"HEAD" or
+        100 <= response_code <= 199 or
+        (response_code == 200 and request_method == b"CONNECT") or
+        response_code in (204, 304)
+    ))
+
+    if is_empty_response:
+        return 0
+    if is_request and headers.get(b"expect", b"").lower() == b"100-continue":
+        return 0
+    if b"chunked" in headers.get(b"transfer-encoding", b"").lower():
+        return None
+    if b"content-length" in headers:
+        try:
+            size = int(headers[b"content-length"])
+            if size < 0:
+                raise ValueError()
+            return size
+        except ValueError:
+            raise HttpSyntaxException("Unparseable Content Length")
+    if is_request:
+        return 0
+    return -1
+
+
+def _get_first_line(rfile):
+    line = rfile.readline()
+    if line == b"\r\n" or line == b"\n":
+        # Possible leftover from previous message
+        line = rfile.readline()
+    if not line:
+        raise HttpReadDisconnect()
+    return line
+
+
+def _read_request_line(rfile):
+    line = _get_first_line(rfile)
+
+    try:
+        method, path, http_version = line.strip().split(b" ")
+
+        if path == b"*" or path.startswith(b"/"):
+            form = "relative"
+            path.decode("ascii")  # should not raise a ValueError
+            scheme, host, port = None, None, None
+        elif method == b"CONNECT":
+            form = "authority"
+            host, port = _parse_authority_form(path)
+            scheme, path = None, None
+        else:
+            form = "absolute"
+            scheme, host, port, path = utils.parse_url(path)
+
+    except ValueError:
+        raise HttpSyntaxException("Bad HTTP request line: {}".format(line))
+
+    return form, method, scheme, host, port, path, http_version
+
+
+def _parse_authority_form(hostport):
+    """
+    Returns (host, port) if hostport is a valid authority-form host specification.
+    http://tools.ietf.org/html/draft-luotonen-web-proxy-tunneling-01 section 3.1
+
+    Raises:
+        ValueError, if the input is malformed
+    """
+    try:
+        host, port = hostport.split(b":")
+        port = int(port)
+        if not utils.is_valid_host(host) or not utils.is_valid_port(port):
+            raise ValueError()
+    except ValueError:
+        raise ValueError("Invalid host specification: {}".format(hostport))
+
+    return host, port
+
+
+def _read_response_line(rfile):
+    line = _get_first_line(rfile)
+
+    try:
+
+        parts = line.strip().split(b" ", 2)
+        if len(parts) == 2:  # handle missing message gracefully
+            parts.append(b"")
+
+        http_version, status_code, message = parts
+        status_code = int(status_code)
+        _check_http_version(http_version)
+
+    except ValueError:
+        raise HttpSyntaxException("Bad HTTP response line: {}".format(line))
+
+    return http_version, status_code, message
+
+
+def _check_http_version(http_version):
+    if not re.match(br"^HTTP/\d\.\d$", http_version):
+        raise HttpSyntaxException("Unknown HTTP version: {}".format(http_version))
+
+
+def _read_headers(rfile):
+    """
+    Read a set of headers.
+    Stop once a blank line is reached.
+
+    Returns:
+        A headers object
+
+    Raises:
+        HttpSyntaxException
+    """
+    ret = []
+    while True:
+        line = rfile.readline()
+        if not line or line == b"\r\n" or line == b"\n":
+            break
+        if line[0] in b" \t":
+            if not ret:
+                raise HttpSyntaxException("Invalid headers")
+            # continued header
+            ret[-1][1] = ret[-1][1] + b'\r\n ' + line.strip()
+        else:
+            try:
+                name, value = line.split(b":", 1)
+                value = value.strip()
+                ret.append([name, value])
+            except ValueError:
+                raise HttpSyntaxException("Invalid headers")
+    return Headers(ret)
+
+
+def _read_chunked(rfile, limit):
+    """
+    Read an HTTP body with chunked transfer encoding.
+
+    Args:
+        rfile: the input file
+        limit: A positive integer
+    """
+    total = 0
+    while True:
+        line = rfile.readline(128)
+        if line == b"":
+            raise HttpException("Connection closed prematurely")
+        if line != b"\r\n" and line != b"\n":
+            try:
+                length = int(line, 16)
+            except ValueError:
+                raise HttpSyntaxException("Invalid chunked encoding length: {}".format(line))
+            total += length
+            if total > limit:
+                raise HttpException(
+                    "HTTP Body too large. Limit is {}, "
+                    "chunked content longer than {}".format(limit, total)
+                )
+            chunk = rfile.read(length)
+            suffix = rfile.readline(5)
+            if suffix != b"\r\n":
+                raise HttpSyntaxException("Malformed chunked body")
+            if length == 0:
+                return
+            yield chunk
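
Finally, a small sketch, not part of the commit, of read_response() consuming a chunked body; io.BytesIO again stands in for a connection's read file, and the previously read request supplies the method that expected_http_body_size() inspects:

import io
from netlib.http import http1

request = http1.read_request(io.BytesIO(b"GET / HTTP/1.1\r\n\r\n"))

raw_response = (
    b"HTTP/1.1 200 OK\r\n"
    b"Transfer-Encoding: chunked\r\n"
    b"\r\n"
    b"4\r\nWiki\r\n"  # one 4-byte chunk
    b"0\r\n\r\n"      # terminating zero-length chunk
)
response = http1.read_response(io.BytesIO(raw_response), request)
# response.status_code == 200 and response.body == b"Wiki";
# the chunk-size lines never appear in the joined body.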