Diffstat (limited to 'netlib/http')
-rw-r--r--  netlib/http/__init__.py           10
-rw-r--r--  netlib/http/authentication.py      6
-rw-r--r--  netlib/http/cookies.py            12
-rw-r--r--  netlib/http/headers.py             8
-rw-r--r--  netlib/http/http1/assemble.py      8
-rw-r--r--  netlib/http/http1/read.py         76
-rw-r--r--  netlib/http/http2/connections.py  23
-rw-r--r--  netlib/http/message.py             4
-rw-r--r--  netlib/http/multipart.py           2
-rw-r--r--  netlib/http/request.py            50
-rw-r--r--  netlib/http/response.py           28
-rw-r--r--  netlib/http/url.py                 2
12 files changed, 117 insertions(+), 112 deletions(-)
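Every hunk below applies the same mechanical change: relative imports (e.g. "from .headers import Headers", "from ... import utils") are rewritten as absolute imports rooted at the netlib package, and call sites are qualified through the imported module. A condensed sketch of the pattern, using only names that appear in the hunks; the parse() helper is purely illustrative and not part of the patch:

    # Before the patch: relative imports bind the class names directly.
    #
    #     from ...exceptions import HttpSyntaxException
    #     from .headers import Headers
    #
    #     raise HttpSyntaxException("Invalid headers")
    #     return Headers(fields)

    # After the patch: absolute imports keep the package prefix visible,
    # and call sites go through the imported module.
    from netlib import exceptions
    from netlib.http import headers


    def parse(fields):
        # fields: a list of (name, value) byte tuples, as built in _read_headers().
        if not fields:
            raise exceptions.HttpSyntaxException("Invalid headers")
        return headers.Headers(fields)
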
diff --git a/netlib/http/__init__.py b/netlib/http/__init__.py
index 14de26a1..af95f4d0 100644
--- a/netlib/http/__init__.py
+++ b/netlib/http/__init__.py
@@ -1,9 +1,9 @@
from __future__ import absolute_import, print_function, division
-from .request import Request
-from .response import Response
-from .headers import Headers, parse_content_type
-from .message import decoded
-from . import http1, http2, status_codes, multipart
+from netlib.http.request import Request
+from netlib.http.response import Response
+from netlib.http.headers import Headers, parse_content_type
+from netlib.http.message import decoded
+from netlib.http import http1, http2, status_codes, multipart
__all__ = [
"Request",
diff --git a/netlib/http/authentication.py b/netlib/http/authentication.py
index 6db70fdd..38ea46d6 100644
--- a/netlib/http/authentication.py
+++ b/netlib/http/authentication.py
@@ -1,5 +1,5 @@
from __future__ import (absolute_import, print_function, division)
-from argparse import Action, ArgumentTypeError
+import argparse
import binascii
@@ -124,7 +124,7 @@ class PassManSingleUser(PassMan):
return self.username == username and self.password == password_token
-class AuthAction(Action):
+class AuthAction(argparse.Action):
"""
Helper class to allow seamless integration int argparse. Example usage:
@@ -148,7 +148,7 @@ class SingleuserAuthAction(AuthAction):
def getPasswordManager(self, s):
if len(s.split(':')) != 2:
- raise ArgumentTypeError(
+ raise argparse.ArgumentTypeError(
"Invalid single-user specification. Please use the format username:password"
)
username, password = s.split(':')
diff --git a/netlib/http/cookies.py b/netlib/http/cookies.py
index 2be93e18..768a85df 100644
--- a/netlib/http/cookies.py
+++ b/netlib/http/cookies.py
@@ -1,8 +1,8 @@
import collections
import re
-from email.utils import parsedate_tz, formatdate, mktime_tz
-from netlib.multidict import ImmutableMultiDict
+import email.utils
+from netlib import multidict
"""
A flexible module for cookie parsing and manipulation.
@@ -167,7 +167,7 @@ def parse_set_cookie_headers(headers):
return ret
-class CookieAttrs(ImmutableMultiDict):
+class CookieAttrs(multidict.ImmutableMultiDict):
@staticmethod
def _kconv(key):
return key.lower()
@@ -243,10 +243,10 @@ def refresh_set_cookie_header(c, delta):
raise ValueError("Invalid Cookie")
if "expires" in attrs:
- e = parsedate_tz(attrs["expires"])
+ e = email.utils.parsedate_tz(attrs["expires"])
if e:
- f = mktime_tz(e) + delta
- attrs = attrs.with_set_all("expires", [formatdate(f)])
+ f = email.utils.mktime_tz(e) + delta
+ attrs = attrs.with_set_all("expires", [email.utils.formatdate(f)])
else:
# This can happen when the expires tag is invalid.
# reddit.com sends a an expires tag like this: "Thu, 31 Dec
diff --git a/netlib/http/headers.py b/netlib/http/headers.py
index fa7b7180..9bf4b69d 100644
--- a/netlib/http/headers.py
+++ b/netlib/http/headers.py
@@ -3,8 +3,8 @@ from __future__ import absolute_import, print_function, division
import re
import six
-from ..multidict import MultiDict
-from ..utils import always_bytes
+from netlib import multidict
+from netlib import utils
# See also: http://lucumr.pocoo.org/2013/7/2/the-updated-guide-to-unicode/
@@ -20,10 +20,10 @@ else:
return x.decode("utf-8", "surrogateescape")
def _always_bytes(x):
- return always_bytes(x, "utf-8", "surrogateescape")
+ return utils.always_bytes(x, "utf-8", "surrogateescape")
-class Headers(MultiDict):
+class Headers(multidict.MultiDict):
"""
Header class which allows both convenient access to individual headers as well as
direct access to the underlying raw data. Provides a full dictionary interface.
diff --git a/netlib/http/http1/assemble.py b/netlib/http/http1/assemble.py
index 2f941877..00d1563b 100644
--- a/netlib/http/http1/assemble.py
+++ b/netlib/http/http1/assemble.py
@@ -1,12 +1,12 @@
from __future__ import absolute_import, print_function, division
-from ... import utils
-from ...exceptions import HttpException
+from netlib import utils
+from netlib import exceptions
def assemble_request(request):
if request.content is None:
- raise HttpException("Cannot assemble flow with missing content")
+ raise exceptions.HttpException("Cannot assemble flow with missing content")
head = assemble_request_head(request)
body = b"".join(assemble_body(request.data.headers, [request.data.content]))
return head + body
@@ -20,7 +20,7 @@ def assemble_request_head(request):
def assemble_response(response):
if response.content is None:
- raise HttpException("Cannot assemble flow with missing content")
+ raise exceptions.HttpException("Cannot assemble flow with missing content")
head = assemble_response_head(response)
body = b"".join(assemble_body(response.data.headers, [response.data.content]))
return head + body
diff --git a/netlib/http/http1/read.py b/netlib/http/http1/read.py
index 5783ec67..bf4c2f0c 100644
--- a/netlib/http/http1/read.py
+++ b/netlib/http/http1/read.py
@@ -3,10 +3,12 @@ import time
import sys
import re
-from ... import utils
-from ...exceptions import HttpReadDisconnect, HttpSyntaxException, HttpException, TcpDisconnect
-from .. import Request, Response, Headers
-from .. import url
+from netlib.http import request
+from netlib.http import response
+from netlib.http import headers
+from netlib.http import url
+from netlib import utils
+from netlib import exceptions
def get_header_tokens(headers, key):
@@ -40,9 +42,9 @@ def read_request_head(rfile):
The HTTP request object (without body)
Raises:
- HttpReadDisconnect: No bytes can be read from rfile.
- HttpSyntaxException: The input is malformed HTTP.
- HttpException: Any other error occured.
+ exceptions.HttpReadDisconnect: No bytes can be read from rfile.
+ exceptions.HttpSyntaxException: The input is malformed HTTP.
+ exceptions.HttpException: Any other error occured.
"""
timestamp_start = time.time()
if hasattr(rfile, "reset_timestamps"):
@@ -55,7 +57,7 @@ def read_request_head(rfile):
# more accurate timestamp_start
timestamp_start = rfile.first_byte_timestamp
- return Request(
+ return request.Request(
form, method, scheme, host, port, path, http_version, headers, None, timestamp_start
)
@@ -79,9 +81,9 @@ def read_response_head(rfile):
The HTTP request object (without body)
Raises:
- HttpReadDisconnect: No bytes can be read from rfile.
- HttpSyntaxException: The input is malformed HTTP.
- HttpException: Any other error occured.
+ exceptions.HttpReadDisconnect: No bytes can be read from rfile.
+ exceptions.HttpSyntaxException: The input is malformed HTTP.
+ exceptions.HttpException: Any other error occured.
"""
timestamp_start = time.time()
@@ -95,7 +97,7 @@ def read_response_head(rfile):
# more accurate timestamp_start
timestamp_start = rfile.first_byte_timestamp
- return Response(http_version, status_code, message, headers, None, timestamp_start)
+ return response.Response(http_version, status_code, message, headers, None, timestamp_start)
def read_body(rfile, expected_size, limit=None, max_chunk_size=4096):
@@ -112,7 +114,7 @@ def read_body(rfile, expected_size, limit=None, max_chunk_size=4096):
A generator that yields byte chunks of the content.
Raises:
- HttpException, if an error occurs
+ exceptions.HttpException, if an error occurs
Caveats:
max_chunk_size is not considered if the transfer encoding is chunked.
@@ -127,7 +129,7 @@ def read_body(rfile, expected_size, limit=None, max_chunk_size=4096):
yield x
elif expected_size >= 0:
if limit is not None and expected_size > limit:
- raise HttpException(
+ raise exceptions.HttpException(
"HTTP Body too large. "
"Limit is {}, content length was advertised as {}".format(limit, expected_size)
)
@@ -136,7 +138,7 @@ def read_body(rfile, expected_size, limit=None, max_chunk_size=4096):
chunk_size = min(bytes_left, max_chunk_size)
content = rfile.read(chunk_size)
if len(content) < chunk_size:
- raise HttpException("Unexpected EOF")
+ raise exceptions.HttpException("Unexpected EOF")
yield content
bytes_left -= chunk_size
else:
@@ -150,7 +152,7 @@ def read_body(rfile, expected_size, limit=None, max_chunk_size=4096):
bytes_left -= chunk_size
not_done = rfile.read(1)
if not_done:
- raise HttpException("HTTP body too large. Limit is {}.".format(limit))
+ raise exceptions.HttpException("HTTP body too large. Limit is {}.".format(limit))
def connection_close(http_version, headers):
@@ -180,7 +182,7 @@ def expected_http_body_size(request, response=None):
- -1, if all data should be read until end of stream.
Raises:
- HttpSyntaxException, if the content length header is invalid
+ exceptions.HttpSyntaxException, if the content length header is invalid
"""
# Determine response size according to
# http://tools.ietf.org/html/rfc7230#section-3.3
@@ -215,7 +217,7 @@ def expected_http_body_size(request, response=None):
raise ValueError()
return size
except ValueError:
- raise HttpSyntaxException("Unparseable Content Length")
+ raise exceptions.HttpSyntaxException("Unparseable Content Length")
if is_request:
return 0
return -1
@@ -227,19 +229,19 @@ def _get_first_line(rfile):
if line == b"\r\n" or line == b"\n":
# Possible leftover from previous message
line = rfile.readline()
- except TcpDisconnect:
- raise HttpReadDisconnect("Remote disconnected")
+ except exceptions.TcpDisconnect:
+ raise exceptions.HttpReadDisconnect("Remote disconnected")
if not line:
- raise HttpReadDisconnect("Remote disconnected")
+ raise exceptions.HttpReadDisconnect("Remote disconnected")
return line.strip()
def _read_request_line(rfile):
try:
line = _get_first_line(rfile)
- except HttpReadDisconnect:
+ except exceptions.HttpReadDisconnect:
# We want to provide a better error message.
- raise HttpReadDisconnect("Client disconnected")
+ raise exceptions.HttpReadDisconnect("Client disconnected")
try:
method, path, http_version = line.split(b" ")
@@ -257,7 +259,7 @@ def _read_request_line(rfile):
_check_http_version(http_version)
except ValueError:
- raise HttpSyntaxException("Bad HTTP request line: {}".format(line))
+ raise exceptions.HttpSyntaxException("Bad HTTP request line: {}".format(line))
return form, method, scheme, host, port, path, http_version
@@ -276,7 +278,7 @@ def _parse_authority_form(hostport):
if not utils.is_valid_host(host) or not utils.is_valid_port(port):
raise ValueError()
except ValueError:
- raise HttpSyntaxException("Invalid host specification: {}".format(hostport))
+ raise exceptions.HttpSyntaxException("Invalid host specification: {}".format(hostport))
return host, port
@@ -284,9 +286,9 @@ def _parse_authority_form(hostport):
def _read_response_line(rfile):
try:
line = _get_first_line(rfile)
- except HttpReadDisconnect:
+ except exceptions.HttpReadDisconnect:
# We want to provide a better error message.
- raise HttpReadDisconnect("Server disconnected")
+ raise exceptions.HttpReadDisconnect("Server disconnected")
try:
@@ -299,14 +301,14 @@ def _read_response_line(rfile):
_check_http_version(http_version)
except ValueError:
- raise HttpSyntaxException("Bad HTTP response line: {}".format(line))
+ raise exceptions.HttpSyntaxException("Bad HTTP response line: {}".format(line))
return http_version, status_code, message
def _check_http_version(http_version):
if not re.match(br"^HTTP/\d\.\d$", http_version):
- raise HttpSyntaxException("Unknown HTTP version: {}".format(http_version))
+ raise exceptions.HttpSyntaxException("Unknown HTTP version: {}".format(http_version))
def _read_headers(rfile):
@@ -318,7 +320,7 @@ def _read_headers(rfile):
A headers object
Raises:
- HttpSyntaxException
+ exceptions.HttpSyntaxException
"""
ret = []
while True:
@@ -327,7 +329,7 @@ def _read_headers(rfile):
break
if line[0] in b" \t":
if not ret:
- raise HttpSyntaxException("Invalid headers")
+ raise exceptions.HttpSyntaxException("Invalid headers")
# continued header
ret[-1] = (ret[-1][0], ret[-1][1] + b'\r\n ' + line.strip())
else:
@@ -338,8 +340,8 @@ def _read_headers(rfile):
raise ValueError()
ret.append((name, value))
except ValueError:
- raise HttpSyntaxException("Invalid headers")
- return Headers(ret)
+ raise exceptions.HttpSyntaxException("Invalid headers")
+ return headers.Headers(ret)
def _read_chunked(rfile, limit=sys.maxsize):
@@ -354,22 +356,22 @@ def _read_chunked(rfile, limit=sys.maxsize):
while True:
line = rfile.readline(128)
if line == b"":
- raise HttpException("Connection closed prematurely")
+ raise exceptions.HttpException("Connection closed prematurely")
if line != b"\r\n" and line != b"\n":
try:
length = int(line, 16)
except ValueError:
- raise HttpSyntaxException("Invalid chunked encoding length: {}".format(line))
+ raise exceptions.HttpSyntaxException("Invalid chunked encoding length: {}".format(line))
total += length
if total > limit:
- raise HttpException(
+ raise exceptions.HttpException(
"HTTP Body too large. Limit is {}, "
"chunked content longer than {}".format(limit, total)
)
chunk = rfile.read(length)
suffix = rfile.readline(5)
if suffix != b"\r\n":
- raise HttpSyntaxException("Malformed chunked body")
+ raise exceptions.HttpSyntaxException("Malformed chunked body")
if length == 0:
return
yield chunk
diff --git a/netlib/http/http2/connections.py b/netlib/http/http2/connections.py
index 16bdf618..8667d370 100644
--- a/netlib/http/http2/connections.py
+++ b/netlib/http/http2/connections.py
@@ -5,9 +5,12 @@ import time
import hyperframe.frame
from hpack.hpack import Encoder, Decoder
-from ... import utils
-from .. import Headers, Response, Request, url
-from . import framereader
+from netlib import utils
+from netlib.http import url
+import netlib.http.headers
+import netlib.http.response
+import netlib.http.request
+from netlib.http.http2 import framereader
class TCPHandler(object):
@@ -128,7 +131,7 @@ class HTTP2Protocol(object):
port = 80 if scheme == 'http' else 443
port = int(port)
- request = Request(
+ request = netlib.http.request.Request(
first_line_format,
method.encode('ascii'),
scheme.encode('ascii'),
@@ -176,7 +179,7 @@ class HTTP2Protocol(object):
else:
timestamp_end = None
- response = Response(
+ response = netlib.http.response.Response(
b"HTTP/2.0",
int(headers.get(':status', 502)),
b'',
@@ -190,15 +193,15 @@ class HTTP2Protocol(object):
return response
def assemble(self, message):
- if isinstance(message, Request):
+ if isinstance(message, netlib.http.request.Request):
return self.assemble_request(message)
- elif isinstance(message, Response):
+ elif isinstance(message, netlib.http.response.Response):
return self.assemble_response(message)
else:
raise ValueError("HTTP message not supported.")
def assemble_request(self, request):
- assert isinstance(request, Request)
+ assert isinstance(request, netlib.http.request.Request)
authority = self.tcp_handler.sni if self.tcp_handler.sni else self.tcp_handler.address.host
if self.tcp_handler.address.port != 443:
@@ -222,7 +225,7 @@ class HTTP2Protocol(object):
self._create_body(request.body, stream_id)))
def assemble_response(self, response):
- assert isinstance(response, Response)
+ assert isinstance(response, netlib.http.response.Response)
headers = response.headers.copy()
@@ -422,7 +425,7 @@ class HTTP2Protocol(object):
else:
self._handle_unexpected_frame(frm)
- headers = Headers(
+ headers = netlib.http.headers.Headers(
(k.encode('ascii'), v.encode('ascii')) for k, v in self.decoder.decode(header_blocks)
)
diff --git a/netlib/http/message.py b/netlib/http/message.py
index d9654f26..c51f16a2 100644
--- a/netlib/http/message.py
+++ b/netlib/http/message.py
@@ -4,8 +4,8 @@ import warnings
import six
-from .. import encoding, utils, basetypes
-from . import headers
+from netlib import encoding, utils, basetypes
+from netlib.http import headers
if six.PY2: # pragma: no cover
def _native(x):
diff --git a/netlib/http/multipart.py b/netlib/http/multipart.py
index a135eb86..536b2809 100644
--- a/netlib/http/multipart.py
+++ b/netlib/http/multipart.py
@@ -1,6 +1,6 @@
import re
-from . import headers
+from netlib.http import headers
def decode(hdrs, content):
diff --git a/netlib/http/request.py b/netlib/http/request.py
index 2fcea67d..890cf593 100644
--- a/netlib/http/request.py
+++ b/netlib/http/request.py
@@ -5,14 +5,14 @@ import re
import six
from six.moves import urllib
+from netlib import encoding
+from netlib import multidict
from netlib import utils
-import netlib.http.url
from netlib.http import multipart
-from . import cookies
-from .. import encoding
-from ..multidict import MultiDictView
-from .headers import Headers
-from .message import Message, _native, _always_bytes, MessageData
+from netlib.http import cookies
+from netlib.http import headers as nheaders
+from netlib.http import message
+import netlib.http.url
# This regex extracts & splits the host header into host and port.
# Handles the edge case of IPv6 addresses containing colons.
@@ -20,11 +20,11 @@ from .message import Message, _native, _always_bytes, MessageData
host_header_re = re.compile(r"^(?P<host>[^:]+|\[.+\])(?::(?P<port>\d+))?$")
-class RequestData(MessageData):
+class RequestData(message.MessageData):
def __init__(self, first_line_format, method, scheme, host, port, path, http_version, headers=(), content=None,
timestamp_start=None, timestamp_end=None):
- if not isinstance(headers, Headers):
- headers = Headers(headers)
+ if not isinstance(headers, nheaders.Headers):
+ headers = nheaders.Headers(headers)
self.first_line_format = first_line_format
self.method = method
@@ -39,7 +39,7 @@ class RequestData(MessageData):
self.timestamp_end = timestamp_end
-class Request(Message):
+class Request(message.Message):
"""
An HTTP request.
"""
@@ -91,22 +91,22 @@ class Request(Message):
"""
HTTP request method, e.g. "GET".
"""
- return _native(self.data.method).upper()
+ return message._native(self.data.method).upper()
@method.setter
def method(self, method):
- self.data.method = _always_bytes(method)
+ self.data.method = message._always_bytes(method)
@property
def scheme(self):
"""
HTTP request scheme, which should be "http" or "https".
"""
- return _native(self.data.scheme)
+ return message._native(self.data.scheme)
@scheme.setter
def scheme(self, scheme):
- self.data.scheme = _always_bytes(scheme)
+ self.data.scheme = message._always_bytes(scheme)
@property
def host(self):
@@ -168,11 +168,11 @@ class Request(Message):
if self.data.path is None:
return None
else:
- return _native(self.data.path)
+ return message._native(self.data.path)
@path.setter
def path(self, path):
- self.data.path = _always_bytes(path)
+ self.data.path = message._always_bytes(path)
@property
def url(self):
@@ -225,11 +225,11 @@ class Request(Message):
@property
def query(self):
- # type: () -> MultiDictView
+ # type: () -> multidict.MultiDictView
"""
The request query string as an :py:class:`MultiDictView` object.
"""
- return MultiDictView(
+ return multidict.MultiDictView(
self._get_query,
self._set_query
)
@@ -250,13 +250,13 @@ class Request(Message):
@property
def cookies(self):
- # type: () -> MultiDictView
+ # type: () -> multidict.MultiDictView
"""
The request cookies.
- An empty :py:class:`MultiDictView` object if the cookie monster ate them all.
+ An empty :py:class:`multidict.MultiDictView` object if the cookie monster ate them all.
"""
- return MultiDictView(
+ return multidict.MultiDictView(
self._get_cookies,
self._set_cookies
)
@@ -329,11 +329,11 @@ class Request(Message):
@property
def urlencoded_form(self):
"""
- The URL-encoded form data as an :py:class:`MultiDictView` object.
- An empty MultiDictView if the content-type indicates non-form data
+ The URL-encoded form data as an :py:class:`multidict.MultiDictView` object.
+ An empty multidict.MultiDictView if the content-type indicates non-form data
or the content could not be parsed.
"""
- return MultiDictView(
+ return multidict.MultiDictView(
self._get_urlencoded_form,
self._set_urlencoded_form
)
@@ -362,7 +362,7 @@ class Request(Message):
The multipart form data as an :py:class:`MultipartFormDict` object.
None if the content-type indicates non-form data.
"""
- return MultiDictView(
+ return multidict.MultiDictView(
self._get_multipart_form,
self._set_multipart_form
)
diff --git a/netlib/http/response.py b/netlib/http/response.py
index 858b3aea..44b58be6 100644
--- a/netlib/http/response.py
+++ b/netlib/http/response.py
@@ -3,18 +3,18 @@ from __future__ import absolute_import, print_function, division
from email.utils import parsedate_tz, formatdate, mktime_tz
import time
-from . import cookies
-from .headers import Headers
-from .message import Message, _native, _always_bytes, MessageData
-from ..multidict import MultiDictView
-from .. import human
+from netlib.http import cookies
+from netlib.http import headers as nheaders
+from netlib.http import message
+from netlib import multidict
+from netlib import human
-class ResponseData(MessageData):
+class ResponseData(message.MessageData):
def __init__(self, http_version, status_code, reason=None, headers=(), content=None,
timestamp_start=None, timestamp_end=None):
- if not isinstance(headers, Headers):
- headers = Headers(headers)
+ if not isinstance(headers, nheaders.Headers):
+ headers = nheaders.Headers(headers)
self.http_version = http_version
self.status_code = status_code
@@ -25,7 +25,7 @@ class ResponseData(MessageData):
self.timestamp_end = timestamp_end
-class Response(Message):
+class Response(message.Message):
"""
An HTTP response.
"""
@@ -63,17 +63,17 @@ class Response(Message):
HTTP Reason Phrase, e.g. "Not Found".
This is always :py:obj:`None` for HTTP2 requests, because HTTP2 responses do not contain a reason phrase.
"""
- return _native(self.data.reason)
+ return message._native(self.data.reason)
@reason.setter
def reason(self, reason):
- self.data.reason = _always_bytes(reason)
+ self.data.reason = message._always_bytes(reason)
@property
def cookies(self):
- # type: () -> MultiDictView
+ # type: () -> multidict.MultiDictView
"""
- The response cookies. A possibly empty :py:class:`MultiDictView`, where the keys are
+ The response cookies. A possibly empty :py:class:`multidict.MultiDictView`, where the keys are
cookie name strings, and values are (value, attr) tuples. Value is a string, and attr is
an ODictCaseless containing cookie attributes. Within attrs, unary attributes (e.g. HTTPOnly)
are indicated by a Null value.
@@ -81,7 +81,7 @@ class Response(Message):
Caveats:
Updating the attr
"""
- return MultiDictView(
+ return multidict.MultiDictView(
self._get_cookies,
self._set_cookies
)
diff --git a/netlib/http/url.py b/netlib/http/url.py
index 8ce28578..5d461387 100644
--- a/netlib/http/url.py
+++ b/netlib/http/url.py
@@ -1,7 +1,7 @@
import six
from six.moves import urllib
-from .. import utils
+from netlib import utils
# PY2 workaround