⚗️ Try compatible fork Niquests to supercharge HTTPie
Ahmed TAHRI committed Oct 13, 2023
1 parent e52a60e commit faf9aef
Showing 38 changed files with 289 additions and 195 deletions.
7 changes: 1 addition & 6 deletions .github/workflows/tests.yml
@@ -25,8 +25,7 @@ jobs:
fail-fast: false
matrix:
os: [ubuntu-latest, macos-latest, windows-latest]
python-version: [3.7, 3.8, 3.9, "3.10"]
pyopenssl: [0, 1]
python-version: [3.7, 3.8, 3.9, "3.10", "3.11", "3.12"]
runs-on: ${{ matrix.os }}
steps:
- uses: actions/checkout@v3
@@ -39,12 +38,8 @@ jobs:
python -m pip install --upgrade pip wheel
python -m pip install --upgrade '.[dev]'
python -m pytest --verbose ./httpie ./tests
env:
HTTPIE_TEST_WITH_PYOPENSSL: ${{ matrix.pyopenssl }}
- name: Linux & Mac setup
if: matrix.os != 'windows-latest'
run: |
make install
make test
env:
HTTPIE_TEST_WITH_PYOPENSSL: ${{ matrix.pyopenssl }}
2 changes: 1 addition & 1 deletion docs/README.md
@@ -2556,7 +2556,7 @@ HTTPie has the following community channels:
Under the hood, HTTPie uses these two amazing libraries:
- [Requests](https://requests.readthedocs.io/en/latest/) — Python HTTP library for humans
- [Niquests](https://niquests.readthedocs.io/en/latest/) — Python HTTP library for humans
- [Pygments](https://pygments.org/) — Python syntax highlighter
#### HTTPie friends
8 changes: 4 additions & 4 deletions docs/contributors/fetch.py
@@ -1,7 +1,7 @@
"""
Generate the contributors database.
FIXME: replace `requests` calls with the HTTPie API, when available.
FIXME: replace `niquests` calls with the HTTPie API, when available.
"""
import json
import os
@@ -14,7 +14,7 @@
from time import sleep
from typing import Any, Dict, Optional, Set

import requests
import niquests

FullNames = Set[str]
GitHubLogins = Set[str]
@@ -197,10 +197,10 @@ def fetch(url: str, params: Optional[Dict[str, str]] = None) -> UserInfo:
}
for retry in range(1, 6):
debug(f'[{retry}/5]', f'{url = }', f'{params = }')
with requests.get(url, params=params, headers=headers) as req:
with niquests.get(url, params=params, headers=headers) as req:
try:
req.raise_for_status()
except requests.exceptions.HTTPError as exc:
except niquests.exceptions.HTTPError as exc:
if exc.response.status_code == 403:
# 403 Client Error: rate limit exceeded for url: ...
now = int(datetime.utcnow().timestamp())
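The hunks above use Niquests as a drop-in for Requests in fetch.py: the context-managed get(), raise_for_status(), and the HTTPError type keep the same names. A minimal sketch of that call pattern, assuming the Requests-compatible get()/timeout signature; the probe() helper and URL are illustrative, not part of the commit:

import niquests

def probe(url: str) -> int:
    # Same shape as the fetch() retry body above: context-managed GET,
    # raise_for_status(), and HTTPError under niquests.exceptions.
    with niquests.get(url, timeout=10) as resp:
        try:
            resp.raise_for_status()
        except niquests.exceptions.HTTPError as exc:
            return exc.response.status_code
        return resp.status_code

# e.g. probe('https://api.github.com/rate_limit') -> 200, or 403 when rate-limited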
4 changes: 2 additions & 2 deletions httpie/__init__.py
@@ -3,7 +3,7 @@
"""

__version__ = '3.2.2'
__date__ = '2022-05-06'
__version__ = '4.0.0'
__date__ = '2023-10-11'
__author__ = 'Jakub Roztocil'
__licence__ = 'BSD'
2 changes: 1 addition & 1 deletion httpie/adapters.py
@@ -1,5 +1,5 @@
from httpie.cli.dicts import HTTPHeadersDict
from requests.adapters import HTTPAdapter
from niquests.adapters import HTTPAdapter


class HTTPieHTTPAdapter(HTTPAdapter):
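Only the HTTPAdapter import changes here. For context, a sketch of subclassing and mounting an adapter, assuming Niquests keeps the Requests-style Session.mount() and HTTPAdapter.send() API; the LoggingAdapter below is illustrative and not part of this commit:

import niquests
from niquests.adapters import HTTPAdapter

class LoggingAdapter(HTTPAdapter):
    # Illustrative subclass: log the outgoing request, then defer to the base send().
    def send(self, request, **kwargs):
        print('>>>', request.method, request.url)
        return super().send(request, **kwargs)

session = niquests.Session()
session.mount('https://', LoggingAdapter())  # assumed Requests-compatible mount()
print(session.get('https://example.org').status_code)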
2 changes: 1 addition & 1 deletion httpie/cli/argparser.py
@@ -7,7 +7,7 @@
from textwrap import dedent
from urllib.parse import urlsplit

from requests.utils import get_netrc_auth
from niquests.utils import get_netrc_auth

from .argtypes import (
AuthCredentials, SSLCredentials, KeyValueArgType,
58 changes: 25 additions & 33 deletions httpie/client.py
@@ -1,13 +1,11 @@
import argparse
import http.client
import json
import sys
from contextlib import contextmanager
from time import monotonic
from typing import Any, Dict, Callable, Iterable
from urllib.parse import urlparse, urlunparse

import requests
import niquests
# noinspection PyPackageRequirements
import urllib3
from urllib3.util import SKIP_HEADER, SKIPPABLE_HEADERS
@@ -44,6 +42,7 @@ def collect_messages(
env: Environment,
args: argparse.Namespace,
request_body_read_callback: Callable[[bytes], None] = None,
prepared_request_readiness: Callable[[niquests.PreparedRequest], None] = None,
) -> Iterable[RequestsMessage]:
httpie_session = None
httpie_session_headers = None
@@ -88,7 +87,12 @@ def collect_messages(
# TODO: reflect the split between request and send kwargs.
dump_request(request_kwargs)

request = requests.Request(**request_kwargs)
hooks = None

if prepared_request_readiness:
hooks = {"pre_send": [prepared_request_readiness]}

request = niquests.Request(**request_kwargs, hooks=hooks)
prepared_request = requests_session.prepare_request(request)
transform_headers(request, prepared_request)
if args.path_as_is:
@@ -110,12 +114,13 @@
url=prepared_request.url,
**send_kwargs_mergeable_from_env,
)
with max_headers(args.max_headers):
response = requests_session.send(
request=prepared_request,
**send_kwargs_merged,
**send_kwargs,
)
response = requests_session.send(
request=prepared_request,
**send_kwargs_merged,
**send_kwargs,
)
if args.max_headers and len(response.headers) > args.max_headers:
raise niquests.ConnectionError(f"got more than {args.max_headers} headers")
response._httpie_headers_parsed_at = monotonic()
expired_cookies += get_expired_cookies(
response.headers.get('Set-Cookie', '')
@@ -124,7 +129,7 @@
response_count += 1
if response.next:
if args.max_redirects and response_count == args.max_redirects:
raise requests.TooManyRedirects
raise niquests.TooManyRedirects
if args.follow:
prepared_request = response.next
if args.all:
@@ -140,25 +145,12 @@
httpie_session.save()


# noinspection PyProtectedMember
@contextmanager
def max_headers(limit):
# <https://github.com/httpie/cli/issues/802>
# noinspection PyUnresolvedReferences
orig = http.client._MAXHEADERS
http.client._MAXHEADERS = limit or float('Inf')
try:
yield
finally:
http.client._MAXHEADERS = orig


def build_requests_session(
verify: bool,
ssl_version: str = None,
ciphers: str = None,
) -> requests.Session:
requests_session = requests.Session()
) -> niquests.Session:
requests_session = niquests.Session()

# Install our adapter.
http_adapter = HTTPieHTTPAdapter()
@@ -186,7 +178,7 @@ def build_requests_session(

def dump_request(kwargs: dict):
sys.stderr.write(
f'\n>>> requests.request(**{repr_dict(kwargs)})\n\n')
f'\n>>> niquests.request(**{repr_dict(kwargs)})\n\n')


def finalize_headers(headers: HTTPHeadersDict) -> HTTPHeadersDict:
@@ -210,13 +202,13 @@ def finalize_headers(headers: HTTPHeadersDict) -> HTTPHeadersDict:


def transform_headers(
request: requests.Request,
prepared_request: requests.PreparedRequest
request: niquests.Request,
prepared_request: niquests.PreparedRequest
) -> None:
"""Apply various transformations on top of the `prepared_requests`'s
headers to change the request preparation behavior."""

# Remove 'Content-Length' when it is misplaced by requests.
# Remove 'Content-Length' when it is misplaced by niquests.
if (
prepared_request.method in IGNORE_CONTENT_LENGTH_METHODS
and prepared_request.headers.get('Content-Length') == '0'
@@ -232,7 +224,7 @@

def apply_missing_repeated_headers(
original_headers: HTTPHeadersDict,
prepared_request: requests.PreparedRequest
prepared_request: niquests.PreparedRequest
) -> None:
"""Update the given `prepared_request`'s headers with the original
ones. This allows the requests to be prepared as usual, and then later
@@ -291,7 +283,7 @@ def make_send_kwargs_mergeable_from_env(args: argparse.Namespace) -> dict:
cert = args.cert
if args.cert_key:
# Having a client certificate key passphrase is not supported
# by requests. So we are using our own transportation structure
# by niquests. So we are using our own transportation structure
# which is compatible with their format (a tuple of minimum two
# items).
#
@@ -329,7 +321,7 @@ def make_request_kwargs(
request_body_read_callback=lambda chunk: chunk
) -> dict:
"""
Translate our `args` into `requests.Request` keyword arguments.
Translate our `args` into `niquests.Request` keyword arguments.
"""
files = args.files
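The central change in client.py is the new "pre_send" hook: instead of patching http.client._MAXHEADERS around the send, HTTPie registers a readiness callback on niquests.Request and enforces the header limit after the response is parsed. A minimal sketch of that wiring, using only calls visible in this diff; the on_pre_send body and the example URL/limit are illustrative:

import niquests

def on_pre_send(prepared):
    # Called by Niquests once the connection is established, before the request
    # is written; conn_info exposes the destination address and TLS details.
    if prepared.conn_info and prepared.conn_info.destination_address:
        print('connected to', prepared.conn_info.destination_address)

session = niquests.Session()
request = niquests.Request('GET', 'https://example.org', hooks={'pre_send': [on_pre_send]})
prepared = session.prepare_request(request)
response = session.send(prepared)

# Post-response guard replacing the old max_headers() context manager.
limit = 100
if limit and len(response.headers) > limit:
    raise niquests.ConnectionError(f'got more than {limit} headers')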
105 changes: 97 additions & 8 deletions httpie/core.py
@@ -5,9 +5,9 @@
import socket
from typing import List, Optional, Union, Callable

import requests
import niquests
from pygments import __version__ as pygments_version
from requests import __version__ as requests_version
from niquests import __version__ as requests_version

from . import __version__ as httpie_version
from .cli.constants import OUT_REQ_BODY
@@ -112,16 +112,16 @@ def handle_generic_error(e, annotation=None):
if include_traceback:
raise
exit_status = ExitStatus.ERROR
except requests.Timeout:
except niquests.Timeout:
exit_status = ExitStatus.ERROR_TIMEOUT
env.log_error(f'Request timed out ({parsed_args.timeout}s).')
except requests.TooManyRedirects:
except niquests.TooManyRedirects:
exit_status = ExitStatus.ERROR_TOO_MANY_REDIRECTS
env.log_error(
f'Too many redirects'
f' (--max-redirects={parsed_args.max_redirects}).'
)
except requests.exceptions.ConnectionError as exc:
except niquests.exceptions.ConnectionError as exc:
annotation = None
original_exc = unwrap_context(exc)
if isinstance(original_exc, socket.gaierror):
@@ -175,8 +175,8 @@ def program(args: argparse.Namespace, env: Environment) -> ExitStatus:
# TODO: Refactor and drastically simplify, especially so that the separator logic is elsewhere.
exit_status = ExitStatus.SUCCESS
downloader = None
initial_request: Optional[requests.PreparedRequest] = None
final_response: Optional[requests.Response] = None
initial_request: Optional[niquests.PreparedRequest] = None
final_response: Optional[niquests.Response] = None
processing_options = ProcessingOptions.from_raw_args(args)

def separate():
@@ -204,8 +204,94 @@ def request_body_read_callback(chunk: bytes):
args.follow = True # --download implies --follow.
downloader = Downloader(env, output_file=args.output_file, resume=args.download_resume)
downloader.pre_request(args.headers)

def prepared_request_readiness(pr):
nonlocal output_options, do_write_body, processing_options

if initial_request == pr:
if args.debug and pr.conn_info and pr.conn_info.destination_address:
sys.stderr.write(
f"""\n>>> Connected to {pr.conn_info.destination_address[0]} port {pr.conn_info.destination_address[1]}\n"""
)

if args.debug and pr.conn_info:
if pr.conn_info.cipher:
sys.stderr.write(
f"""\n>>> Connection secured using {pr.conn_info.tls_version.name.replace('_', '.')} / {pr.conn_info.cipher}\n\n"""
)

if pr.conn_info.certificate_dict:
sys.stderr.write(">>> Server certificate:\n")

if "subject" in pr.conn_info.certificate_dict:
sys.stderr.write(
f">>> subject: "
)

for entry in pr.conn_info.certificate_dict['subject']:
if len(entry) == 2:
rdns, value = entry
elif len(entry) == 1:
rdns, value = entry[0]
else:
continue

sys.stderr.write(f'{rdns}="{value}"; ')

sys.stderr.write("\n")

sys.stderr.write(f">>> start date: {pr.conn_info.certificate_dict['notBefore']}\n")
sys.stderr.write(f">>> expire date: {pr.conn_info.certificate_dict['notAfter']}\n")

if "subjectAltName" in pr.conn_info.certificate_dict:
sys.stderr.write(
f">>> subjectAltName: "
)

for entry in pr.conn_info.certificate_dict['subjectAltName']:
if len(entry) == 2:
rdns, value = entry
sys.stderr.write(f'{rdns}="{value}"; ')

sys.stderr.write("\n")

if "issuer" in pr.conn_info.certificate_dict:
sys.stderr.write(
f">>> issuer: "
)

for entry in pr.conn_info.certificate_dict['issuer']:
if len(entry) == 2:
rdns, value = entry
elif len(entry) == 1:
rdns, value = entry[0]
else:
continue

sys.stderr.write(f'{rdns}="{value}"; ')

sys.stderr.write("\n\n")

if pr.ocsp_verified is None:
sys.stderr.write(">>> Revocation status: Unverified\n\n")
elif pr.ocsp_verified:
sys.stderr.write(f">>> Revocation status: Good\n\n")
else:
sys.stderr.write(f">>> Revocation status: Error\n\n")
else:
sys.stderr.write("\n")

write_message(
requests_message=pr,
env=env,
output_options=output_options._replace(
body=do_write_body
),
processing_options=processing_options
)

messages = collect_messages(env, args=args,
request_body_read_callback=request_body_read_callback)
request_body_read_callback=request_body_read_callback, prepared_request_readiness=prepared_request_readiness)
force_separator = False
prev_with_body = False

@@ -225,6 +311,9 @@ def request_body_read_callback(chunk: bytes):
is_streamed_upload = not isinstance(message.body, (str, bytes))
do_write_body = not is_streamed_upload
force_separator = is_streamed_upload and env.stdout_isatty
if message.conn_info is None and not args.offline:
prev_with_body = output_options.body
continue
else:
final_response = message
if args.check_status or downloader:
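The prepared_request_readiness() callback added above reads the TLS details that Niquests exposes on pr.conn_info (cipher, tls_version, certificate_dict, ocsp_verified). The subject/issuer loops suggest certificate_dict follows the ssl.getpeercert() layout, where subject and issuer are sequences of RDN tuples. A compact helper with the same unpacking logic; format_rdns() is hypothetical, not part of the commit:

def format_rdns(entries) -> str:
    # subject/issuer entries are RDNs, i.e. 1-tuples of (key, value) pairs as in
    # ssl.getpeercert(); subjectAltName entries are flat (type, value) pairs.
    parts = []
    for entry in entries:
        if len(entry) == 2:
            key, value = entry
        elif len(entry) == 1:
            key, value = entry[0]
        else:
            continue
        parts.append(f'{key}="{value}"')
    return '; '.join(parts)

# e.g. format_rdns(pr.conn_info.certificate_dict['subject']) -> 'commonName="example.org"'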