Add support for httpx.Response(content=..., text=..., html=..., json=...) #1250

Closed
Wants to merge 9 commits.
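For reference, a minimal sketch of the constructor usage this PR proposes, inferred from the diff below (illustrative only — the PR was closed, so the shipped API may differ):

```python
import httpx

# Keyword arguments proposed in this PR; headers are derived from the
# chosen content stream (see the _content_streams.py diff below).
response = httpx.Response(200, text="Hello, world!")
assert response.headers["Content-Type"] == "text/plain; charset=utf-8"

response = httpx.Response(200, html="<html><body>Hello</body></html>")
assert response.headers["Content-Type"] == "text/html; charset=utf-8"

response = httpx.Response(200, json={"hello": "world"})
response = httpx.Response(200, content=b"raw bytes")
```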
96 changes: 75 additions & 21 deletions httpx/_content_streams.py
@@ -8,7 +8,7 @@
import httpcore

from ._exceptions import StreamConsumed
from ._types import FileContent, FileTypes, RequestData, RequestFiles
from ._types import FileContent, FileTypes, RequestData, RequestFiles, ResponseContent
from ._utils import (
format_form_param,
guess_content_type,
@@ -72,11 +72,8 @@ class IteratorStream(ContentStream):
Request content encoded as plain bytes, using a byte iterator.
"""

def __init__(
self, iterator: typing.Iterator[bytes], close_func: typing.Callable = None
) -> None:
def __init__(self, iterator: typing.Iterator[bytes]) -> None:
self.iterator = iterator
self.close_func = close_func
self.is_stream_consumed = False

def can_replay(self) -> bool:
@@ -95,21 +92,14 @@ def __iter__(self) -> typing.Iterator[bytes]:
def __aiter__(self) -> typing.AsyncIterator[bytes]:
raise RuntimeError("Attempted to call an async iterator on a sync stream.")

def close(self) -> None:
if self.close_func is not None:
self.close_func()
Member Author commented:

Turns out we don't really need close_func on IteratorStream/AsyncIteratorStream anymore, which falls out from updating test cases to use Response(content=<bytes iterator>)
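As an illustration of that point, a hedged sketch of the test-style usage the comment refers to — a byte iterator passed directly as `content`, with no `close_func` involved:

```python
import httpx

def stream_body():
    yield b"Hello, "
    yield b"world!"

# Per the Response constructor change in _models.py below, iterator content
# is not read eagerly; the body is only consumed when read() is called.
response = httpx.Response(200, content=stream_body())
assert response.read() == b"Hello, world!"
```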



class AsyncIteratorStream(ContentStream):
"""
Request content encoded as plain bytes, using an async byte iterator.
"""

def __init__(
self, aiterator: typing.AsyncIterator[bytes], close_func: typing.Callable = None
) -> None:
def __init__(self, aiterator: typing.AsyncIterator[bytes]) -> None:
self.aiterator = aiterator
self.close_func = close_func
self.is_stream_consumed = False

def can_replay(self) -> bool:
@@ -128,10 +118,6 @@ async def __aiter__(self) -> typing.AsyncIterator[bytes]:
async for part in self.aiterator:
yield part

async def aclose(self) -> None:
if self.close_func is not None:
await self.close_func()


class JSONStream(ContentStream):
"""
@@ -370,7 +356,47 @@ async def __aiter__(self) -> typing.AsyncIterator[bytes]:
yield chunk


def encode(
class TextStream(ContentStream):
"""
Response content as plain text.
"""

def __init__(self, text: str) -> None:
self.body = text.encode("utf-8")

def get_headers(self) -> typing.Dict[str, str]:
content_length = str(len(self.body))
content_type = "text/plain; charset=utf-8"
return {"Content-Length": content_length, "Content-Type": content_type}

def __iter__(self) -> typing.Iterator[bytes]:
yield self.body

async def __aiter__(self) -> typing.AsyncIterator[bytes]:
yield self.body


class HTMLStream(ContentStream):
"""
Response content as HTML.
"""

def __init__(self, html: str) -> None:
self.body = html.encode("utf-8")

def get_headers(self) -> typing.Dict[str, str]:
content_length = str(len(self.body))
content_type = "text/html; charset=utf-8"
return {"Content-Length": content_length, "Content-Type": content_type}

def __iter__(self) -> typing.Iterator[bytes]:
yield self.body

async def __aiter__(self) -> typing.AsyncIterator[bytes]:
yield self.body


def encode_request_body(
data: RequestData = None,
files: RequestFiles = None,
json: typing.Any = None,
@@ -380,13 +406,12 @@ def encode(
Handles encoding the given `data`, `files`, and `json`, returning
a `ContentStream` implementation.
"""
if not data:
if data is None:
if json is not None:
return JSONStream(json=json)
elif files:
return MultipartStream(data={}, files=files, boundary=boundary)
else:
return ByteStream(body=b"")
return ByteStream(body=b"")
elif isinstance(data, dict):
if files:
return MultipartStream(data=data, files=files, boundary=boundary)
@@ -402,3 +427,32 @@ return IteratorStream(iterator=data)
return IteratorStream(iterator=data)

raise TypeError(f"Unexpected type for 'data', {type(data)!r}")


def encode_response_body(
content: ResponseContent = None,
text: str = None,
html: str = None,
json: typing.Any = None,
) -> ContentStream:
if content is None:
if text is not None:
return TextStream(text=text)
elif html is not None:
return HTMLStream(html=html)
elif json is not None:
return JSONStream(json=json)
return ByteStream(b"")
elif isinstance(content, bytes):
return ByteStream(body=content)
elif hasattr(content, "__aiter__"):
content = typing.cast(typing.AsyncIterator[bytes], content)
return AsyncIteratorStream(aiterator=content)
elif hasattr(content, "__iter__"):
content = typing.cast(typing.Iterator[bytes], content)
return IteratorStream(iterator=content)

raise TypeError(
f"Unexpected type for 'content', should be bytes or "
f"byte iterator {type(content)!r}"
)
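Taken together, `encode_response_body` mirrors `encode_request_body` but dispatches on the response-specific keyword arguments. A rough sketch of the expected dispatch, using the internal module path as it appears on this branch:

```python
from httpx._content_streams import (
    ByteStream,
    HTMLStream,
    JSONStream,
    TextStream,
    encode_response_body,
)

# Each keyword argument maps onto a dedicated ContentStream implementation.
assert isinstance(encode_response_body(text="hi"), TextStream)
assert isinstance(encode_response_body(html="<p>hi</p>"), HTMLStream)
assert isinstance(encode_response_body(json={"a": 1}), JSONStream)
assert isinstance(encode_response_body(content=b"hi"), ByteStream)
assert isinstance(encode_response_body(), ByteStream)  # empty body
```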
28 changes: 22 additions & 6 deletions httpx/_models.py
@@ -14,7 +14,12 @@
import rfc3986
import rfc3986.exceptions

from ._content_streams import ByteStream, ContentStream, encode
from ._content_streams import (
ByteStream,
ContentStream,
encode_request_body,
encode_response_body,
)
from ._decoders import (
SUPPORTED_DECODERS,
Decoder,
@@ -44,6 +49,7 @@
QueryParamTypes,
RequestData,
RequestFiles,
ResponseContent,
URLTypes,
)
from ._utils import (
@@ -600,7 +606,7 @@ def __init__(
if stream is not None:
self.stream = stream
else:
self.stream = encode(data, files, json)
self.stream = encode_request_body(data, files, json)

self.timer = ElapsedTimer()
self.prepare()
@@ -668,11 +674,14 @@ def __init__(
self,
status_code: int,
*,
request: Request = None,
http_version: str = None,
headers: HeaderTypes = None,
content: ResponseContent = None,
text: str = None,
html: str = None,
json: typing.Any = None,
stream: ContentStream = None,
content: bytes = None,
request: Request = None,
history: typing.List["Response"] = None,
):
self.status_code = status_code
@@ -690,8 +699,15 @@ def __init__(
if stream is not None:
self._raw_stream = stream
else:
self._raw_stream = ByteStream(body=content or b"")
self.read()
self._raw_stream = encode_response_body(
content=content, text=text, html=html, json=json
)
for key, value in self._raw_stream.get_headers().items():
self.headers.setdefault(key, value)

if content is None or isinstance(content, bytes):
# Load the response body, except for streaming content.
self.read()

@property
def elapsed(self) -> datetime.timedelta:
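A brief sketch of the header behaviour implied by the `setdefault()` loop above — stream-derived headers fill in gaps but never override anything supplied explicitly (illustrative, based on this diff):

```python
import httpx

response = httpx.Response(200, text="Hello, world!")

# Content-Length and Content-Type come from the underlying TextStream,
# merged via headers.setdefault(), so explicit headers would take precedence.
assert response.headers["Content-Length"] == str(len(b"Hello, world!"))
assert response.headers["Content-Type"] == "text/plain; charset=utf-8"
```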
26 changes: 17 additions & 9 deletions httpx/_transports/urllib3.py
@@ -4,7 +4,7 @@
import httpcore

from .._config import create_ssl_context
from .._content_streams import ByteStream, IteratorStream
from .._content_streams import ByteStream
from .._exceptions import NetworkError, map_exceptions
from .._types import CertTypes, VerifyTypes

@@ -15,6 +15,21 @@
urllib3 = None


class URLLib3ByteStream(httpcore.SyncByteStream):
def __init__(self, conn: urllib3.HTTPResponse) -> None:
self._conn = conn

def __iter__(self) -> Iterator[bytes]:
try:
for chunk in self._conn.stream(4096, decode_content=False):
yield chunk
except socket.error as exc:
raise httpcore.NetworkError(exc) from exc

def close(self) -> None:
self._conn.release_conn()
Member Author commented:

Changes in this module are redundant given #1182, but I've updated this all the same since this PR removes close_func from our internal IteratorStream.



class URLLib3Transport(httpcore.SyncHTTPTransport):
def __init__(
self,
@@ -104,16 +119,9 @@ def request(
pool_timeout=timeout.get("pool"),
)

def response_bytes() -> Iterator[bytes]:
with map_exceptions({socket.error: NetworkError}):
for chunk in conn.stream(4096, decode_content=False):
yield chunk

status_code = conn.status
headers = list(conn.headers.items())
response_stream = IteratorStream(
iterator=response_bytes(), close_func=conn.release_conn
)
response_stream = URLLib3ByteStream(conn=conn)
return (b"HTTP/1.1", status_code, conn.reason, headers, response_stream)

def close(self) -> None:
1 change: 1 addition & 0 deletions httpx/_types.py
@@ -64,6 +64,7 @@
]

RequestData = Union[dict, str, bytes, Iterator[bytes], AsyncIterator[bytes]]
ResponseContent = Union[bytes, Iterator[bytes], AsyncIterator[bytes]]

FileContent = Union[IO[str], IO[bytes], str, bytes]
FileTypes = Union[
8 changes: 6 additions & 2 deletions tests/client/test_queryparams.py
@@ -3,7 +3,7 @@
import httpcore

import httpx
from httpx._content_streams import ContentStream, JSONStream
from httpx._content_streams import JSONStream


class MockTransport(httpcore.SyncHTTPTransport):
@@ -15,7 +15,11 @@ def request(
stream: httpcore.SyncByteStream = None,
timeout: typing.Mapping[str, typing.Optional[float]] = None,
) -> typing.Tuple[
bytes, int, bytes, typing.List[typing.Tuple[bytes, bytes]], ContentStream
bytes,
int,
bytes,
typing.List[typing.Tuple[bytes, bytes]],
httpcore.SyncByteStream,
]:
body = JSONStream({"ok": "ok"})
return b"HTTP/1.1", 200, b"OK", [], body