diff --git a/.github/workflows/release-brew.yml b/.github/workflows/release-brew.yml index d58e6b6e3d..3ececcd303 100644 --- a/.github/workflows/release-brew.yml +++ b/.github/workflows/release-brew.yml @@ -11,7 +11,7 @@ on: jobs: brew-release: name: Release the Homebrew Package - runs-on: macos-latest + runs-on: macos-13 steps: - uses: actions/checkout@v3 diff --git a/.github/workflows/test-package-mac-brew.yml b/.github/workflows/test-package-mac-brew.yml index babdaa5def..38b42aa301 100644 --- a/.github/workflows/test-package-mac-brew.yml +++ b/.github/workflows/test-package-mac-brew.yml @@ -9,7 +9,7 @@ on: jobs: brew: - runs-on: macos-latest + runs-on: macos-13 steps: - uses: actions/checkout@v3 - name: Setup brew diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 0dbfae4edb..a8ac8aaa3d 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -24,15 +24,15 @@ jobs: strategy: fail-fast: false matrix: - os: [ubuntu-latest, macos-latest, windows-latest] + os: [ubuntu-22.04, macos-13, windows-latest] python-version: + - '3.13' - '3.12' - '3.11' - '3.10' - '3.9' - '3.8' - '3.7' - pyopenssl: [0, 1] runs-on: ${{ matrix.os }} steps: - uses: actions/checkout@v4 @@ -45,12 +45,8 @@ jobs: python -m pip install --upgrade pip wheel python -m pip install --upgrade '.[dev]' python -m pytest --verbose ./httpie ./tests - env: - HTTPIE_TEST_WITH_PYOPENSSL: ${{ matrix.pyopenssl }} - name: Linux & Mac setup if: matrix.os != 'windows-latest' run: | make install make test - env: - HTTPIE_TEST_WITH_PYOPENSSL: ${{ matrix.pyopenssl }} diff --git a/CHANGELOG.md b/CHANGELOG.md index 310547435e..bb7b879701 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -3,11 +3,45 @@ This document records all notable changes to [HTTPie](https://httpie.io). This project adheres to [Semantic Versioning](https://semver.org/). 
+## [4.0.0](https://github.com/httpie/cli/compare/3.2.3...master) (unreleased) + +- Switched from the [`requests`](https://github.com/psf/requests) library to the compatible [`niquests`](https://github.com/jawah/niquests). ([#1531](https://github.com/httpie/cli/pull/1531)) +- Added support for HTTP/2, and HTTP/3 protocols. ([#523](https://github.com/httpie/cli/issues/523), [#692](https://github.com/httpie/cli/issues/692), [#1531](https://github.com/httpie/cli/pull/1531)) +- Added support for early (informational) responses. ([#752](https://github.com/httpie/cli/issues/752)) ([#1531](https://github.com/httpie/cli/pull/1531)) +- Added support for IPv4/IPv6 enforcement with `-6` and `-4`. ([#94](https://github.com/httpie/cli/issues/94), [#1531](https://github.com/httpie/cli/pull/1531)) +- Added support for Happy Eyeballs algorithm via `--heb` flag (disabled by default). [#1599](https://github.com/httpie/cli/issues/1599) [#1531](https://github.com/httpie/cli/pull/1531) +- Added support for alternative DNS resolvers via `--resolver`. DNS over HTTPS, DNS over TLS, DNS over QUIC, and DNS over UDP are accepted. ([#99](https://github.com/httpie/cli/issues/99), [#1531](https://github.com/httpie/cli/pull/1531)) +- Added support for binding to a specific network adapter with `--interface`. ([#1422](https://github.com/httpie/cli/issues/1422), [#1531](https://github.com/httpie/cli/pull/1531)) +- Added support for specifying the local port with `--local-port`. ([#1456](https://github.com/httpie/cli/issues/1456), [#1531](https://github.com/httpie/cli/pull/1531)) +- Added request metadata for the TLS certificate, negotiated version with cipher, the revocation status and the remote peer IP address. 
([#1495](https://github.com/httpie/cli/issues/1495), [#1023](https://github.com/httpie/cli/issues/1023), [#826](https://github.com/httpie/cli/issues/826), [#1531](https://github.com/httpie/cli/pull/1531)) +- Added support to load the operating system trust store for the peer certificate validation. ([#480](https://github.com/httpie/cli/issues/480), [#1531](https://github.com/httpie/cli/pull/1531)) +- Added support for using the system trust store to retrieve root CAs for verifying TLS certificates. ([#1531](https://github.com/httpie/cli/pull/1531)) +- Added detailed timings in response metadata with DNS resolution, connection establishment, TLS handshake, and request sending delays. ([#1023](https://github.com/httpie/cli/issues/1023), [#1531](https://github.com/httpie/cli/pull/1531)) +- Added automated resolution of hosts ending with `.localhost` to the default loopback address. ([#1458](https://github.com/httpie/cli/issues/1458), [#1527](https://github.com/httpie/cli/issues/1527)) +- Fixed the case when multiple headers were concatenated in the response output. ([#1413](https://github.com/httpie/cli/issues/1413), [#1531](https://github.com/httpie/cli/pull/1531)) +- Fixed an edge case where HTTPie could be led to believe data was passed in stdin, thus sending a POST by default. ([#1551](https://github.com/httpie/cli/issues/1551), [#1531](https://github.com/httpie/cli/pull/1531)) + This fix has the particularity of considering a 0-byte-long stdin buffer as absent stdin. An empty stdin buffer will be ignored. +- Improved performance while downloading by setting chunk size to `-1` to retrieve packets as they arrive. ([#1531](https://github.com/httpie/cli/pull/1531)) +- Fixed multipart form data having a filename that is not RFC 2231 compliant when names contain non-ASCII characters. ([#1401](https://github.com/httpie/cli/issues/1401)) +- Fixed an issue where the configuration directory was not created at runtime, which made the update fetcher run every time. 
([#1527](https://github.com/httpie/cli/issues/1527)) +- Fixed cookie persistence in HTTPie session when targeting localhost. They were dropped due to the standard library. ([#1527](https://github.com/httpie/cli/issues/1527)) +- Fixed downloader when trying to fetch compressed content. The process will no longer exit with the "Incomplete download" error. ([#1554](https://github.com/httpie/cli/issues/1554), [#423](https://github.com/httpie/cli/issues/423), [#1527](https://github.com/httpie/cli/issues/1527)) +- Fixed downloader yielding an incorrect speed when the remote is using `Content-Encoding` aka. compressed body. ([#1554](https://github.com/httpie/cli/issues/1554), [#423](https://github.com/httpie/cli/issues/423), [#1527](https://github.com/httpie/cli/issues/1527)) +- Removed support for preserving the original casing of HTTP headers. This comes as a constraint of newer protocols, namely HTTP/2+ that normalize header keys by default. From the HTTPie user perspective, they are "prettified" in the output by default. e.g. `x-hello-world` is displayed as `X-Hello-World`. +- Removed support for `pyopenssl`. ([#1531](https://github.com/httpie/cli/pull/1531)) +- Removed support for dead SSL protocols < TLS 1.0 (e.g. sslv3) as per pyopenssl removal. ([#1531](https://github.com/httpie/cli/pull/1531)) +- Removed dependency on `requests_toolbelt` in favor of directly including `MultipartEncoder` into HTTPie due to its direct dependency to requests. ([#1531](https://github.com/httpie/cli/pull/1531)) +- Removed dependency on `multidict` in favor of an internal one due to often missing pre-built wheels. ([#1522](https://github.com/httpie/cli/issues/1522), [#1531](https://github.com/httpie/cli/pull/1531)) + +Existing plugins are expected to work without any changes. The only caveat would be that certain plugins explicitly require `requests`. +Future contributions may be made in order to relax the constraints where applicable. 
+ ## [3.2.3](https://github.com/httpie/cli/compare/3.2.2...3.2.3) (2024-07-10) - Fix SSL connections by pinning the `requests` version to `2.31.0`. (#1583, #1581) - Make it possible to [unset](https://httpie.io/docs/cli/default-request-headers) the `User-Agent` and `Accept-Encoding` request headers. ([#1502](https://github.com/httpie/cli/issues/1502)) + ## [3.2.2](https://github.com/httpie/cli/compare/3.2.1...3.2.2) (2023-05-19) - Fixed compatibility with urllib3 2.0.0. ([#1499](https://github.com/httpie/cli/issues/1499)) diff --git a/docs/README.md b/docs/README.md index 65c7ebe4ed..8d339be817 100644 --- a/docs/README.md +++ b/docs/README.md @@ -283,6 +283,13 @@ $ http --version Note that on your machine, the version name will have the `.dev0` suffix. +### HTTP/3 support + +Support for HTTP/3 is available by default if both your interpreter and architecture are served by `qh3` published pre-built wheels. +The underlying library Niquests does not enforce its installation in order to avoid friction for most users. + +See https://urllib3future.readthedocs.io/en/latest/user-guide.html#http-2-and-http-3-support to learn more. + ## Usage Hello World: @@ -1186,22 +1193,24 @@ You can read headers from a file by using the `:@` operator. 
This would also eff $ http pie.dev/headers X-Data:@files/text.txt ``` -### Empty headers and header un-setting +### Empty request headers -To unset a previously specified header (such a one of the default headers), use `Header:`: +To send a header with an empty value, use `Header;`, with a semicolon: ```bash -$ http pie.dev/headers Accept: User-Agent: +$ http pie.dev/headers 'Header;' ``` -To send a header with an empty value, use `Header;`, with a semicolon: +### Header un-setting + +To unset a previously specified header or one of the [default headers](#default-request-headers), use the `Header:` notation: ```bash -$ http pie.dev/headers 'Header;' +$ http pie.dev/headers Accept: User-Agent: ``` -Please note that some internal headers, such as `Content-Length`, can’t be unset if -they are automatically added by the client itself. +Please note that some internal headers, such as `Content-Length`, can’t be unset if they are automatically added by the client itself. +Also, the `Host` header cannot be unset due to support for HTTP/2+ (internally translated into `:authority`) ### Multiple header values with the same name @@ -1530,13 +1539,13 @@ $ http --cert=client.pem --cert-key=client.key --cert-key-pass=my_password https ### SSL version Use the `--ssl=` option to specify the desired protocol version to use. -This will default to SSL v2.3 which will negotiate the highest protocol that both the server and your installation of OpenSSL support. -The available protocols are `ssl2.3`, `ssl3`, `tls1`, `tls1.1`, `tls1.2`, `tls1.3`. +If not specified, it tries to negotiate the highest protocol that both the server and your installation of OpenSSL support. +The available protocols are `tls1`, `tls1.1`, `tls1.2`, `tls1.3`. (The actually available set of protocols may vary depending on your OpenSSL installation.) 
```bash -# Specify the vulnerable SSL v3 protocol to talk to an outdated server: -$ http --ssl=ssl3 https://vulnerable.example.org +# Specify the vulnerable TLS 1 protocol to talk to an outdated server: +$ http --ssl=tls1 https://vulnerable.example.org ``` ### SSL ciphers @@ -1562,9 +1571,9 @@ be printed via several options: |---------------------------:|----------------------------------------------------------------------------------------------------| | `--headers, -h` | Only the response headers are printed | | `--body, -b` | Only the response body is printed | -| `--meta, -m` | Only the [response metadata](#response-meta) is printed | +| `--meta, -m` | Only the [request, response metadata](#response-meta) are printed | | `--verbose, -v` | Print the whole HTTP exchange (request and response). This option also enables `--all` (see below) | -| `--verbose --verbose, -vv` | Just like `-v`, but also include the response metadata. | +| `--verbose --verbose, -vv` | Just like `-v`, but also include the request, and response metadata. | | `--print, -p` | Selects parts of the HTTP exchange | | `--quiet, -q` | Don’t print anything to `stdout` and `stderr` | @@ -1573,13 +1582,13 @@ be printed via several options: All the other [output options](#output-options) are under the hood just shortcuts for the more powerful `--print, -p`. 
It accepts a string of characters each of which represents a specific part of the HTTP exchange: -| Character | Stands for | -|----------:|---------------------------------| -| `H` | request headers | -| `B` | request body | -| `h` | response headers | -| `b` | response body | -| `m` | [response meta](#response-meta) | +| Character | Stands for | +|----------:|------------------------------------------| +| `H` | request headers | +| `B` | request body | +| `h` | response headers | +| `b` | response body | +| `m` | [request, response meta](#response-meta) | Print request and response headers: @@ -1592,27 +1601,51 @@ $ http --print=Hh PUT pie.dev/put hello=world The response metadata section currently includes the total time elapsed. It’s the number of seconds between opening the network connection and downloading the last byte of response the body. -To _only_ show the response metadata, use `--meta, -m` (analogically to `--headers, -h` and `--body, -b`): +To _only_ show the request, and response metadata, use `--meta, -m` (analogically to `--headers, -h` and `--body, -b`): ```bash $ http --meta pie.dev/delay/1 ``` ```console -Elapsed time: 1.099171542s +Connected to: 2a06:98c1:3120::2 port 80 + +Elapsed DNS: 0.000833s +Elapsed established connection: 0.020144s +Elapsed emitting request: 0.000121s +Elapsed time: 1.080282s ``` The [extra verbose `-vv` output](#extra-verbose-output) includes the meta section by default. You can also show it in combination with other parts of the exchange via [`--print=m`](#what-parts-of-the-http-exchange-should-be-printed). 
For example, here we print it together with the response headers: ```bash -$ http --print=hm pie.dev/get +$ https --print=hm pie.dev/get ``` ```http -HTTP/1.1 200 OK +Connected to: 2a06:98c1:3120::2 port 443 +Connection secured using: TLSv1.3 with AES-256-GCM-SHA384 +Server certificate: commonName="pie.dev"; DNS="*.pie.dev"; DNS="pie.dev" +Certificate validity: "Mar 08 00:16:33 2024 UTC" to "Jun 06 00:16:32 2024 UTC" +Issuer: countryName="US"; organizationName="Let's Encrypt"; commonName="E1" +Revocation status: Good + +HTTP/3 200 OK +Access-Control-Allow-Credentials: true +Access-Control-Allow-Origin: * +Alt-Svc: h3=":443"; ma=86400 +Cf-Cache-Status: DYNAMIC +Cf-Ray: 867351f9cf37d4fe-CDG +Content-Encoding: br Content-Type: application/json +Date: Wed, 20 Mar 2024 05:32:11 GMT +Server: cloudflare -Elapsed time: 0.077538375s +Elapsed DNS: 0.000629s +Elapsed established connection: 0.000013s +Elapsed TLS handshake: 0.043979s +Elapsed emitting request: 0.000257s +Elapsed time: 0.159567s ``` @@ -1626,19 +1659,19 @@ If you [use `--style` with one of the Pie themes](#colors-and-formatting), you `--verbose` can often be useful for debugging the request and generating documentation examples: ```bash -$ http --verbose PUT pie.dev/put hello=world -PUT /put HTTP/1.1 +$ https --verbose PUT pie.dev/put hello=world +PUT /put HTTP/2 Accept: application/json, */*;q=0.5 Accept-Encoding: gzip, deflate Content-Type: application/json Host: pie.dev -User-Agent: HTTPie/0.2.7dev +User-Agent: HTTPie/4.0.0 { "hello": "world" } -HTTP/1.1 200 OK +HTTP/2 200 OK Connection: keep-alive Content-Length: 477 Content-Type: application/json @@ -1652,10 +1685,10 @@ Server: gunicorn/0.13.4 #### Extra verbose output -If you run HTTPie with `-vv` or `--verbose --verbose`, then it would also display the [response metadata](#response-meta). +If you run HTTPie with `-vv` or `--verbose --verbose`, then it would also display the [response and request metadata](#response-meta). 
```bash -# Just like the above, but with additional columns like the total elapsed time +# Just like the above, but with additional columns like the total elapsed time, remote peer connection informations $ http -vv pie.dev/get ``` @@ -1833,6 +1866,153 @@ $ http --chunked pie.dev/post @files/data.xml $ cat files/data.xml | http --chunked pie.dev/post ``` +## Supported HTTP versions + +HTTPie has full support for HTTP/1.1, HTTP/2, and HTTP/3. + +### Disable HTTP/2, or HTTP/3 + +You can at your own discretion toggle on and off HTTP/1, HTTP/2, or/and HTTP/3. + +```bash +$ https --disable-http2 PUT pie.dev/put hello=world +``` + +```bash +$ https --disable-http3 --disable-http1 PUT pie.dev/put hello=world +``` + +### Force HTTP/3, HTTP/2 or HTTP/1.1 + +By opposition to the previous section, you can force the HTTP/3, HTTP/2 or HTTP/1.1 negotiation. + +```bash +$ https --http3 pie.dev/get +``` + +For HTTP (unencrypted) URLs, you can enforce HTTP 1 or HTTP 2 but not HTTP 3. +You cannot enforce multiple protocols like `--http2 --http3`, they (toggles) are mutually exclusive. + +### Protocol selection + +By default, HTTPie follows what modern browser do to choose a protocol. + +#### For HTTP URLs + +HTTP/1.1 will always be chosen unless you specified `--http2` to enforce HTTP/2 with prior knowledge (also known as h2c). + +Notes: + +- You cannot enforce HTTP/3. +- You cannot disable both HTTP/1.1 and HTTP/2. + +#### For HTTPS URLs + +When reaching to an SSL/TLS server, HTTPie negotiate the protocol through what is called the ALPN extension during +the handshake. + +Basically, HTTPie says during the "Hello" phase: "I can speak HTTP/1.1 and HTTP/2 over TCP, and you?". +Depending on what the server respond to us, we will choose a mutual supported protocols. + +Nowadays, it is most certainly be HTTP/2 by default. + +Some specifics: + +- You cannot disable all three protocols. +- Those toggles do not apply to the DNS-over-HTTPS custom resolver. 
You will have to specify it within the resolver URL. +- When reaching an HTTPS URL, the ALPN extension sent during the SSL/TLS handshake is affected. +- HTTPie never tries HTTP/3 by default unless something hints to us that it is possible. + +##### HTTP 3 Negotiation + +By default, HTTPie cannot negotiate HTTP/3 without a first successful HTTP/1.1 or HTTP/2 response, unless the +remote host specified a DNS HTTPS record that indicates its support (and by using a custom DNS resolver, see the section below). + +The remote server yields its support for HTTP/3 in the `Alt-Svc` header; if present, HTTPie will issue +the successive requests via HTTP/3. You may use that argument in case the remote peer does not support +either HTTP/1.1 or HTTP/2. + +Note: HTTPie caches which servers are QUIC compatible in the `config` directory so that we can remember. + +## Custom DNS resolver + +### Using DNS url + +You can specify one or many custom DNS resolvers using the `--resolver` flag. They will be tried in +the presented order to resolve the given hostname. + +```bash +$ https --resolver "doh+cloudflare://" pie.dev/get +``` + +To learn more about DNS URLs and supported protocols, visit the [Niquests documentation](https://niquests.readthedocs.io/en/stable/user/quickstart.html#dns-resolution). + +### Forcing hostname to resolve with a manual entry + +It is possible to fake DNS resolution using a virtual resolver. We'll make use of the `--resolver` flag +using the `in-memory` provider. + +```bash +$ https --resolver "in-memory://default/?hosts=pie.dev:10.10.4.1" pie.dev/get +``` + +In that example, `pie.dev` will resolve to `10.10.4.1`. The TLS HELLO / SNI will be set with host = `pie.dev`. 
+ +HTTPie allows you to pass the hostname and associated IPs directly, as a shortcut to the previous example, like so: + +```bash +$ https --resolver "pie.dev:10.10.4.1" pie.dev/get +``` + +You can specify multiple entries, concatenated with a comma: + +```bash +$ https --resolver "pie.dev:10.10.4.1,re.pie.dev:10.10.8.1" pie.dev/get +``` + +## Happy Eyeballs + +By default, when HTTPie establishes the connection it asks for the IP(v4 or v6) records of +the requested domain and then tries them sequentially, preferring IPv6 by default. This +may induce longer connection delays and in some cases hangs due to an unresponsive endpoint. +To concurrently try to connect to available IP(v4 or v6), set the following flag: + +```bash +$ https --heb pie.dev/get +``` + +## Network interface + +In order to bind emitted requests to a specific network adapter you can use the `--interface` flag. + +```bash +$ https --interface 172.17.0.1 pie.dev/get +``` + +## Local port + +You can choose to select the outgoing (local) network port manually by passing the `--local-port` flag. + +```bash +$ https --local-port 5411 pie.dev/get +``` + +or using a range. (The example below will pick a free port between 5000 and 10000.) + +```bash +$ https --local-port 5000-10000 pie.dev/get +``` + +Beware that some ports require elevated privileges. + +## Enforcing IPv4 or IPv6 + +Since HTTPie 4, you may pass the flags `--ipv4, -4` or `--ipv6, -6` to enforce connecting to an IPv4 or IPv6 address. 
+ +```bash +$ https -4 pie.dev/get +``` + ## Compressed request body You can use the `--compress, -x` flag to instruct HTTPie to use `Content-Encoding: deflate` and compress the request data: @@ -2006,13 +2186,23 @@ $ http --download https://github.com/httpie/cli/archive/master.tar.gz ``` ```http -HTTP/1.1 200 OK -Content-Disposition: attachment; filename=httpie-master.tar.gz -Content-Length: 257336 +HTTP/2 200 OK +Access-Control-Allow-Origin: https://render.githubusercontent.com +Content-Disposition: attachment; filename=cli-master.tar.gz +Content-Security-Policy: default-src 'none'; style-src 'unsafe-inline'; sandbox Content-Type: application/x-gzip +Cross-Origin-Resource-Policy: cross-origin +Date: Wed, 20 Mar 2024 05:37:32 GMT +Etag: "68a7a50930daac494551b4576eef285f86da741c9b4a0f3a1deeac9fce4e80d4" +Strict-Transport-Security: max-age=31536000 +Vary: Authorization,Accept-Encoding,Origin +X-Content-Type-Options: nosniff +X-Frame-Options: deny +X-Github-Request-Id: 9E06:20C449:55A32E:636ADA:65FA761C +X-Xss-Protection: 1; mode=block -Downloading 251.30 kB to "httpie-master.tar.gz" -Done. 251.30 kB in 2.73862s (91.76 kB/s) +Downloading to cli-master.tar.gz +Done. 1.3 MB in 00:0.54267 (2.4 MB/s) ``` ### Downloaded filename @@ -2556,7 +2746,7 @@ HTTPie has the following community channels: Under the hood, HTTPie uses these two amazing libraries: -- [Requests](https://requests.readthedocs.io/en/latest/) — Python HTTP library for humans +- [Niquests](https://niquests.readthedocs.io/en/latest/) — Python HTTP library for humans - [Pygments](https://pygments.org/) — Python syntax highlighter #### HTTPie friends diff --git a/docs/contributors/fetch.py b/docs/contributors/fetch.py index ba94c28183..1ea1e8d05a 100644 --- a/docs/contributors/fetch.py +++ b/docs/contributors/fetch.py @@ -1,7 +1,7 @@ """ Generate the contributors database. -FIXME: replace `requests` calls with the HTTPie API, when available. +FIXME: replace `niquests` calls with the HTTPie API, when available. 
""" import json import os @@ -14,7 +14,7 @@ from time import sleep from typing import Any, Dict, Optional, Set -import requests +import niquests FullNames = Set[str] GitHubLogins = Set[str] @@ -197,10 +197,10 @@ def fetch(url: str, params: Optional[Dict[str, str]] = None) -> UserInfo: } for retry in range(1, 6): debug(f'[{retry}/5]', f'{url = }', f'{params = }') - with requests.get(url, params=params, headers=headers) as req: + with niquests.get(url, params=params, headers=headers) as req: try: req.raise_for_status() - except requests.exceptions.HTTPError as exc: + except niquests.exceptions.HTTPError as exc: if exc.response.status_code == 403: # 403 Client Error: rate limit exceeded for url: ... now = int(datetime.utcnow().timestamp()) diff --git a/extras/man/http.1 b/extras/man/http.1 index 09e4237006..fe6acba454 100644 --- a/extras/man/http.1 +++ b/extras/man/http.1 @@ -1,5 +1,5 @@ .\" This file is auto-generated from the parser declaration in httpie/cli/definition.py by extras/scripts/generate_man_pages.py. -.TH http 1 "2024-07-10" "HTTPie 3.2.3" "HTTPie Manual" +.TH http 1 "2024-06-25" "HTTPie 4.0.0" "HTTPie Manual" .SH NAME http .SH SYNOPSIS @@ -427,6 +427,18 @@ and $HTTPS_proxy are supported as well. +.IP "\fB\,--ipv6\/\fR, \fB\,-6\/\fR" + + +Force using a IPv6 address to reach the remote peer. + + +.IP "\fB\,--ipv4\/\fR, \fB\,-4\/\fR" + + +Force using a IPv4 address to reach the remote peer. + + .IP "\fB\,--follow\/\fR, \fB\,-F\/\fR" @@ -484,6 +496,82 @@ Bypass dot segment (/../ or /./) URL squashing. Enable streaming via chunked transfer encoding. The Transfer-Encoding header is set to chunked. +.IP "\fB\,--disable-http1\/\fR" + + +Disable the HTTP/1 protocol. + + +.IP "\fB\,--http1\/\fR" + + +Use the HTTP/1 protocol for the request. + + +.IP "\fB\,--disable-http2\/\fR" + + +Disable the HTTP/2 protocol. + + +.IP "\fB\,--http2\/\fR" + + +Use the HTTP/2 protocol for the request. + + +.IP "\fB\,--disable-http3\/\fR" + + +Disable the HTTP/3 over QUIC protocol. 
+ + +.IP "\fB\,--http3\/\fR" + + +By default, HTTPie cannot negotiate HTTP/3 without a first HTTP/1.1, or HTTP/2 successful response unless the +remote host specified a DNS HTTPS record that indicate its support. + +The remote server yield its support for HTTP/3 in the Alt-Svc header, if present HTTPie will issue +the successive requests via HTTP/3. You may use that argument in case the remote peer does not support +either HTTP/1.1 or HTTP/2. + + + +.IP "\fB\,--heb\/\fR" + + +By default, when HTTPie establish the connection it asks for the IP(v4 or v6) records of +the requested domain and then tries them sequentially preferring IPv6 by default. This +may induce longer connection delays and in some case hangs due to an unresponsive endpoint. +To concurrently try to connect to available IP(v4 or v6), set this flag. + + + +.IP "\fB\,--resolver\/\fR" + + +By default, HTTPie use the system DNS through Python standard library. +You can specify an alternative DNS server to be used. (e.g. doh://cloudflare-dns.com or doh://google.dns). +You can specify multiple resolvers with different protocols. The environment +variable $NIQUESTS_DNS_URL is supported as well. This flag also support overriding DNS resolution +e.g. passing \[dq]pie.dev:1.1.1.1\[dq] will resolve pie.dev to 1.1.1.1 IPv4. + + + +.IP "\fB\,--interface\/\fR" + + +Bind to a specific network interface. + + +.IP "\fB\,--local-port\/\fR" + + +It can be either a port range (e.g. \[dq]11221-14555\[dq]) or a single port. +Some port may require root privileges (e.g. < 1024). + + .PP .SH SSL .IP "\fB\,--verify\/\fR" @@ -498,11 +586,10 @@ variable instead.) .IP "\fB\,--ssl\/\fR" -The desired protocol version to use. This will default to -SSL v2.3 which will negotiate the highest protocol that both -the server and your installation of OpenSSL support. Available protocols -may vary depending on OpenSSL installation (only the supported ones -are shown here). +The desired protocol version to use. 
If not specified, it tries to +negotiate the highest protocol that both the server and your installation +of OpenSSL support. Available protocols may vary depending on OpenSSL +installation (only the supported ones are shown here). @@ -511,9 +598,10 @@ are shown here). A string in the OpenSSL cipher list format. +tls1.3 ciphers are always present regardless of your cipher list. -See `http \fB\,--help\/\fR` for the default ciphers list on you system. +See `http \fB\,--help\/\fR` for the default ciphers list. @@ -597,4 +685,4 @@ For every \fB\,--OPTION\/\fR there is also a \fB\,--no-OPTION\/\fR that reverts to its default value. Suggestions and bug reports are greatly appreciated: -https://github.com/httpie/cli/issues \ No newline at end of file +https://github.com/httpie/cli/issues diff --git a/extras/man/httpie.1 b/extras/man/httpie.1 index ba269b1acf..7f396ef9f9 100644 --- a/extras/man/httpie.1 +++ b/extras/man/httpie.1 @@ -1,5 +1,5 @@ .\" This file is auto-generated from the parser declaration in httpie/manager/cli.py by extras/scripts/generate_man_pages.py. -.TH httpie 1 "2024-07-10" "HTTPie 3.2.3" "HTTPie Manual" +.TH httpie 1 "2024-06-25" "HTTPie 4.0.0" "HTTPie Manual" .SH NAME httpie .SH SYNOPSIS @@ -97,4 +97,4 @@ targets to install .PP .SH httpie plugins list List all installed HTTPie plugins. -.PP \ No newline at end of file +.PP diff --git a/extras/man/https.1 b/extras/man/https.1 index dfb1b018ad..c6e450f484 100644 --- a/extras/man/https.1 +++ b/extras/man/https.1 @@ -1,5 +1,5 @@ .\" This file is auto-generated from the parser declaration in httpie/cli/definition.py by extras/scripts/generate_man_pages.py. -.TH https 1 "2024-07-10" "HTTPie 3.2.3" "HTTPie Manual" +.TH https 1 "2024-06-25" "HTTPie 4.0.0" "HTTPie Manual" .SH NAME https .SH SYNOPSIS @@ -427,6 +427,18 @@ and $HTTPS_proxy are supported as well. +.IP "\fB\,--ipv6\/\fR, \fB\,-6\/\fR" + + +Force using a IPv6 address to reach the remote peer. 
+ + +.IP "\fB\,--ipv4\/\fR, \fB\,-4\/\fR" + + +Force using a IPv4 address to reach the remote peer. + + .IP "\fB\,--follow\/\fR, \fB\,-F\/\fR" @@ -484,6 +496,82 @@ Bypass dot segment (/../ or /./) URL squashing. Enable streaming via chunked transfer encoding. The Transfer-Encoding header is set to chunked. +.IP "\fB\,--disable-http1\/\fR" + + +Disable the HTTP/1 protocol. + + +.IP "\fB\,--http1\/\fR" + + +Use the HTTP/1 protocol for the request. + + +.IP "\fB\,--disable-http2\/\fR" + + +Disable the HTTP/2 protocol. + + +.IP "\fB\,--http2\/\fR" + + +Use the HTTP/2 protocol for the request. + + +.IP "\fB\,--disable-http3\/\fR" + + +Disable the HTTP/3 over QUIC protocol. + + +.IP "\fB\,--http3\/\fR" + + +By default, HTTPie cannot negotiate HTTP/3 without a first HTTP/1.1, or HTTP/2 successful response unless the +remote host specified a DNS HTTPS record that indicate its support. + +The remote server yield its support for HTTP/3 in the Alt-Svc header, if present HTTPie will issue +the successive requests via HTTP/3. You may use that argument in case the remote peer does not support +either HTTP/1.1 or HTTP/2. + + + +.IP "\fB\,--heb\/\fR" + + +By default, when HTTPie establish the connection it asks for the IP(v4 or v6) records of +the requested domain and then tries them sequentially preferring IPv6 by default. This +may induce longer connection delays and in some case hangs due to an unresponsive endpoint. +To concurrently try to connect to available IP(v4 or v6), set this flag. + + + +.IP "\fB\,--resolver\/\fR" + + +By default, HTTPie use the system DNS through Python standard library. +You can specify an alternative DNS server to be used. (e.g. doh://cloudflare-dns.com or doh://google.dns). +You can specify multiple resolvers with different protocols. The environment +variable $NIQUESTS_DNS_URL is supported as well. This flag also support overriding DNS resolution +e.g. passing \[dq]pie.dev:1.1.1.1\[dq] will resolve pie.dev to 1.1.1.1 IPv4. 
+ + + +.IP "\fB\,--interface\/\fR" + + +Bind to a specific network interface. + + +.IP "\fB\,--local-port\/\fR" + + +It can be either a port range (e.g. \[dq]11221-14555\[dq]) or a single port. +Some port may require root privileges (e.g. < 1024). + + .PP .SH SSL .IP "\fB\,--verify\/\fR" @@ -498,11 +586,10 @@ variable instead.) .IP "\fB\,--ssl\/\fR" -The desired protocol version to use. This will default to -SSL v2.3 which will negotiate the highest protocol that both -the server and your installation of OpenSSL support. Available protocols -may vary depending on OpenSSL installation (only the supported ones -are shown here). +The desired protocol version to use. If not specified, it tries to +negotiate the highest protocol that both the server and your installation +of OpenSSL support. Available protocols may vary depending on OpenSSL +installation (only the supported ones are shown here). @@ -511,9 +598,10 @@ are shown here). A string in the OpenSSL cipher list format. +tls1.3 ciphers are always present regardless of your cipher list. -See `http \fB\,--help\/\fR` for the default ciphers list on you system. +See `http \fB\,--help\/\fR` for the default ciphers list. @@ -597,4 +685,4 @@ For every \fB\,--OPTION\/\fR there is also a \fB\,--no-OPTION\/\fR that reverts to its default value. 
Suggestions and bug reports are greatly appreciated: -https://github.com/httpie/cli/issues \ No newline at end of file +https://github.com/httpie/cli/issues diff --git a/extras/profiling/benchmarks.py b/extras/profiling/benchmarks.py index 9d409debbe..c262d46977 100644 --- a/extras/profiling/benchmarks.py +++ b/extras/profiling/benchmarks.py @@ -179,7 +179,7 @@ def run(self, context: Context) -> pyperf.Benchmark: [ '--print=HBhb', f'--pretty={pretty}', - 'httpbin.org/stream/1000' + 'pie.dev/stream/1000' ] ) DownloadRunner('download', '`http --download :/big_file.txt` (3GB)', '3G') diff --git a/httpie/__init__.py b/httpie/__init__.py index d67c919042..d3eb07335b 100644 --- a/httpie/__init__.py +++ b/httpie/__init__.py @@ -3,7 +3,7 @@ """ -__version__ = '3.2.3' -__date__ = '2024-07-10' +__version__ = '4.0.0' +__date__ = '2024-06-25' __author__ = 'Jakub Roztocil' __licence__ = 'BSD' diff --git a/httpie/adapters.py b/httpie/adapters.py index 8e2dd7397f..fa6cfcec89 100644 --- a/httpie/adapters.py +++ b/httpie/adapters.py @@ -1,5 +1,5 @@ from httpie.cli.dicts import HTTPHeadersDict -from requests.adapters import HTTPAdapter +from niquests.adapters import HTTPAdapter class HTTPieHTTPAdapter(HTTPAdapter): diff --git a/httpie/cli/argparser.py b/httpie/cli/argparser.py index 9bf09b3b73..4053b5809b 100644 --- a/httpie/cli/argparser.py +++ b/httpie/cli/argparser.py @@ -3,11 +3,12 @@ import os import re import sys +import threading from argparse import RawDescriptionHelpFormatter from textwrap import dedent from urllib.parse import urlsplit -from requests.utils import get_netrc_auth +from niquests.utils import get_netrc_auth from .argtypes import ( AuthCredentials, SSLCredentials, KeyValueArgType, @@ -24,9 +25,11 @@ ) from .exceptions import ParseError from .requestitems import RequestItems +from ..compat import has_ipv6_support from ..context import Environment from ..plugins.registry import plugin_manager from ..utils import ExplicitNullAuth, get_content_type +from ..uploads 
import observe_stdin_for_data_thread class HTTPieHelpFormatter(RawDescriptionHelpFormatter): @@ -164,15 +167,33 @@ def parse_args( and not self.args.ignore_stdin and not self.env.stdin_isatty ) - self.has_input_data = self.has_stdin_data or self.args.raw is not None # Arguments processing and environment setup. self._apply_no_options(no_options) + self._process_http_versions() self._process_request_type() self._process_download_options() self._setup_standard_streams() self._process_output_options() self._process_pretty_options() self._process_format_options() + self._process_ip_version_options() + + # bellow is a fix for detecting "false-or empty" stdin. + # see https://github.com/httpie/cli/issues/1551 for more information. + if self.has_stdin_data: + read_event = threading.Event() + observe_stdin_for_data_thread(env, self.env.stdin, read_event) + if ( + hasattr(self.env.stdin, 'buffer') + and hasattr(self.env.stdin.buffer, "peek") + and not self.env.stdin.buffer.peek(1) + ): + self.has_stdin_data = False + + read_event.set() + + self.has_input_data = self.has_stdin_data or self.args.raw is not None + self._guess_method() self._parse_items() self._process_url() @@ -193,6 +214,38 @@ def parse_args( return self.args + def _process_http_versions(self): + available, forced, disabled = ( + {1, 2, 3}, + { + self.args.force_http1 and 1, + self.args.force_http2 and 2, + self.args.force_http3 and 3, + } - {False}, + { + self.args.disable_http1 and 1, + self.args.disable_http2 and 2, + self.args.disable_http3 and 3, + } - {False}, + ) + if forced and disabled: + self.error( + 'You cannot both force a http protocol version and disable some other. e.g. ' + '--http2 already force HTTP/2, do not use --disable-http1 at the same time.' + ) + if len(forced) > 1: + self.error( + 'You may only force one of --http1, --http2 or --http3. Use --disable-http1, ' + '--disable-http2 or --disable-http3 instead if you prefer the excluding logic.' 
+ ) + if disabled == available: + self.error('At least one HTTP protocol version must be enabled.') + + if forced: + self.args.disable_http1 = forced != {1} + self.args.disable_http2 = forced != {2} + self.args.disable_http3 = forced != {3} + def _process_request_type(self): request_type = self.args.request_type self.args.json = request_type is RequestType.JSON @@ -558,6 +611,15 @@ def _process_format_options(self): parsed_options = parse_format_options(options_group, defaults=parsed_options) self.args.format_options = parsed_options + def _process_ip_version_options(self): + if not has_ipv6_support() and self.args.force_ipv6: + self.error('Unable to force IPv6 because your system lack IPv6 support.') + if self.args.force_ipv4 and self.args.force_ipv6: + self.error( + 'Unable to force both IPv4 and IPv6, omit the flags to allow both. ' + 'The flags "-6" and "-4" are meant to force one of them.' + ) + def print_manual(self): from httpie.output.ui import man_pages @@ -598,6 +660,17 @@ def print_usage(self, file): def error(self, message): """Prints a usage message incorporating the message to stderr and exits.""" + + # We shall release the files in that case + # the process is going to quit early anyway. 
+ if hasattr(self.args, "multipart_data"): + for f in self.args.multipart_data: + if isinstance(self.args.multipart_data[f], tuple): + self.args.multipart_data[f][1].close() + elif isinstance(self.args.multipart_data[f], list): + for item in self.args.multipart_data[f]: + item[1].close() + self.print_usage(sys.stderr) self.env.rich_error_console.print( dedent( diff --git a/httpie/cli/argtypes.py b/httpie/cli/argtypes.py index 8f19c3c51e..f1d9edd59c 100644 --- a/httpie/cli/argtypes.py +++ b/httpie/cli/argtypes.py @@ -259,7 +259,7 @@ def parse_format_options(s: str, defaults: Optional[dict]) -> dict: ) -def response_charset_type(encoding: str) -> str: +def response_charset_arg_type(encoding: str) -> str: try: ''.encode(encoding) except LookupError: @@ -268,8 +268,17 @@ def response_charset_type(encoding: str) -> str: return encoding -def response_mime_type(mime_type: str) -> str: +def response_mime_arg_type(mime_type: str) -> str: if mime_type.count('/') != 1: raise argparse.ArgumentTypeError( f'{mime_type!r} doesn’t look like a mime type; use type/subtype') return mime_type + + +def interface_arg_type(interface: str) -> str: + import ipaddress + try: + ipaddress.ip_interface(interface) + except ValueError as e: + raise argparse.ArgumentTypeError(str(e)) + return interface diff --git a/httpie/cli/definition.py b/httpie/cli/definition.py index 843b29c9cf..a58658f936 100644 --- a/httpie/cli/definition.py +++ b/httpie/cli/definition.py @@ -5,30 +5,50 @@ from argparse import FileType from httpie import __doc__, __version__ -from httpie.cli.argtypes import (KeyValueArgType, SessionNameValidator, - SSLCredentials, readable_file_arg, - response_charset_type, response_mime_type) -from httpie.cli.constants import (BASE_OUTPUT_OPTIONS, DEFAULT_FORMAT_OPTIONS, - OUT_REQ_BODY, OUT_REQ_HEAD, OUT_RESP_BODY, - OUT_RESP_HEAD, OUT_RESP_META, OUTPUT_OPTIONS, - OUTPUT_OPTIONS_DEFAULT, PRETTY_MAP, - PRETTY_STDOUT_TTY_ONLY, - SEPARATOR_GROUP_ALL_ITEMS, SEPARATOR_PROXY, - 
SORTED_FORMAT_OPTIONS_STRING, - UNSORTED_FORMAT_OPTIONS_STRING, RequestType) -from httpie.cli.options import ParserSpec, Qualifiers, to_argparse -from httpie.output.formatters.colors import (AUTO_STYLE, DEFAULT_STYLE, BUNDLED_STYLES, - get_available_styles) +from httpie.output.formatters.colors import ( + AUTO_STYLE, + BUNDLED_STYLES, + DEFAULT_STYLE, + get_available_styles, +) from httpie.plugins.builtin import BuiltinAuthPlugin from httpie.plugins.registry import plugin_manager from httpie.ssl_ import AVAILABLE_SSL_VERSION_ARG_MAPPING, DEFAULT_SSL_CIPHERS_STRING +from .argtypes import ( + KeyValueArgType, + SSLCredentials, + SessionNameValidator, + interface_arg_type, + readable_file_arg, + response_charset_arg_type, + response_mime_arg_type, +) +from .constants import ( + BASE_OUTPUT_OPTIONS, + DEFAULT_FORMAT_OPTIONS, + OUTPUT_OPTIONS, + OUTPUT_OPTIONS_DEFAULT, + OUT_REQ_BODY, + OUT_REQ_HEAD, + OUT_RESP_BODY, + OUT_RESP_HEAD, + OUT_RESP_META, + PRETTY_MAP, + PRETTY_STDOUT_TTY_ONLY, + RequestType, + SEPARATOR_GROUP_ALL_ITEMS, + SEPARATOR_PROXY, + SORTED_FORMAT_OPTIONS_STRING, + UNSORTED_FORMAT_OPTIONS_STRING, +) +from .options import ParserSpec, Qualifiers, to_argparse +from .ports import local_port_arg_type # Man pages are static (built when making a release). # We use this check to not include generated, system-specific information there (e.g., default --ciphers). 
IS_MAN_PAGE = bool(os.environ.get('HTTPIE_BUILDING_MAN_PAGES')) - options = ParserSpec( 'http', description=f'{__doc__.strip()} ', @@ -349,7 +369,7 @@ def format_style_help(available_styles, *, isolation_mode: bool = False): output_processing.add_argument( '--response-charset', metavar='ENCODING', - type=response_charset_type, + type=response_charset_arg_type, short_help='Override the response encoding for terminal display purposes.', help=""" Override the response encoding for terminal display purposes, e.g.: @@ -362,7 +382,7 @@ def format_style_help(available_styles, *, isolation_mode: bool = False): output_processing.add_argument( '--response-mime', metavar='MIME_TYPE', - type=response_mime_type, + type=response_mime_arg_type, short_help='Override the response mime type for coloring and formatting for the terminal.', help=""" Override the response mime type for coloring and formatting for the terminal, e.g.: @@ -726,6 +746,22 @@ def format_auth_help(auth_plugins_mapping, *, isolation_mode: bool = False): """, ) +network.add_argument( + '--ipv6', + '-6', + dest='force_ipv6', + default=False, + action='store_true', + short_help='Force using a IPv6 address to reach the remote peer.' +) +network.add_argument( + '--ipv4', + '-4', + dest='force_ipv4', + default=False, + action='store_true', + short_help='Force using a IPv4 address to reach the remote peer.' +) network.add_argument( '--follow', '-F', @@ -802,6 +838,98 @@ def format_auth_help(auth_plugins_mapping, *, isolation_mode: bool = False): 'The Transfer-Encoding header is set to chunked.' ) ) +network.add_argument( + "--disable-http1", + default=False, + action="store_true", + short_help="Disable the HTTP/1 protocol." +) +network.add_argument( + "--http1", + default=False, + action="store_true", + dest="force_http1", + short_help="Use the HTTP/1 protocol for the request." +) +network.add_argument( + "--disable-http2", + default=False, + action="store_true", + short_help="Disable the HTTP/2 protocol." 
+) +network.add_argument( + "--http2", + default=False, + action="store_true", + dest="force_http2", + short_help="Use the HTTP/2 protocol for the request." +) +network.add_argument( + "--disable-http3", + default=False, + action="store_true", + short_help="Disable the HTTP/3 over QUIC protocol." +) +network.add_argument( + "--http3", + default=False, + dest="force_http3", + action="store_true", + short_help="Use the HTTP/3 protocol for the request.", + help=""" + By default, HTTPie cannot negotiate HTTP/3 without a first HTTP/1.1, or HTTP/2 successful response unless the + remote host specified a DNS HTTPS record that indicate its support. + + The remote server yield its support for HTTP/3 in the Alt-Svc header, if present HTTPie will issue + the successive requests via HTTP/3. You may use that argument in case the remote peer does not support + either HTTP/1.1 or HTTP/2. + + """ +) +network.add_argument( + "--heb", + default=False, + dest="happy_eyeballs", + action="store_true", + short_help="Establish the connection using IETF Happy Eyeballs algorithm", + help=""" + By default, when HTTPie establish the connection it asks for the IP(v4 or v6) records of + the requested domain and then tries them sequentially preferring IPv6 by default. This + may induce longer connection delays and in some case hangs due to an unresponsive endpoint. + To concurrently try to connect to available IP(v4 or v6), set this flag. + + """ +) +network.add_argument( + "--resolver", + default=[], + action='append', + short_help="Specify a DNS resolver url to resolve hostname.", + help=""" + By default, HTTPie use the system DNS through Python standard library. + You can specify an alternative DNS server to be used. (e.g. doh://cloudflare-dns.com or doh://google.dns). + You can specify multiple resolvers with different protocols. The environment + variable $NIQUESTS_DNS_URL is supported as well. This flag also support overriding DNS resolution + e.g. 
passing "pie.dev:1.1.1.1" will resolve pie.dev to 1.1.1.1 IPv4. + + """ +) +network.add_argument( + "--interface", + type=interface_arg_type, + default='0.0.0.0', + short_help="Bind to a specific network interface.", +) +network.add_argument( + "--local-port", + type=local_port_arg_type, + default=0, + short_help="Set the local port to be used for the outgoing request.", + help=""" + It can be either a port range (e.g. "11221-14555") or a single port. + Some port may require root privileges (e.g. < 1024). + """ +) ####################################################################### # SSL @@ -826,23 +954,22 @@ def format_auth_help(auth_plugins_mapping, *, isolation_mode: bool = False): choices=sorted(AVAILABLE_SSL_VERSION_ARG_MAPPING.keys()), short_help='The desired protocol version to used.', help=""" - The desired protocol version to use. This will default to - SSL v2.3 which will negotiate the highest protocol that both - the server and your installation of OpenSSL support. Available protocols - may vary depending on OpenSSL installation (only the supported ones - are shown here). + The desired protocol version to use. If not specified, it tries to + negotiate the highest protocol that both the server and your installation + of OpenSSL support. Available protocols may vary depending on OpenSSL + installation (only the supported ones are shown here). """, ) CIPHERS_CURRENT_DEFAULTS = ( """ - See `http --help` for the default ciphers list on you system. + See `http --help` for the default ciphers list. """ if IS_MAN_PAGE else f""" - By default, the following ciphers are used on your system: + By default, the following ciphers are used: {DEFAULT_SSL_CIPHERS_STRING} @@ -854,6 +981,7 @@ def format_auth_help(auth_plugins_mapping, *, isolation_mode: bool = False): help=f""" A string in the OpenSSL cipher list format. + tls1.3 ciphers are always present regardless of your cipher list. 
{CIPHERS_CURRENT_DEFAULTS} diff --git a/httpie/cli/dicts.py b/httpie/cli/dicts.py index 6b6d4736d2..53faa234a0 100644 --- a/httpie/cli/dicts.py +++ b/httpie/cli/dicts.py @@ -1,49 +1,168 @@ +from __future__ import annotations + +import typing from collections import OrderedDict +from typing import Union, TypeVar -from multidict import MultiDict, CIMultiDict +T = TypeVar("T") -class BaseMultiDict(MultiDict): +class BaseMultiDictKeyView: """ - Base class for all MultiDicts. + Basic key view for BaseMultiDict. """ + def __init__(self, o: BaseMultiDict) -> None: + self._container = o + + def __iter__(self): + for key in self._container: + yield key + + def __contains__(self, item: str) -> bool: + return item in self._container -class HTTPHeadersDict(CIMultiDict, BaseMultiDict): + +class BaseMultiDict(typing.MutableMapping[str, Union[str, bytes]]): """ - Headers are case-insensitive and multiple values are supported - through the `add()` API. + This follow the multidict (case-insensitive) implementation but does not implement it fully. + We scoped this class according to our needs. In the future we should be able to refactor + HTTPie in order to use either kiss_headers.Headers or urllib3.HTTPHeaderDict. + The main constraints are: We use bytes sometime in values, and relly on multidict specific behaviors. """ - def add(self, key, value): - """ - Add or update a new header. + def __init__(self, d: BaseMultiDict | typing.MutableMapping[str, str | bytes] | None = None, **kwargs: str | bytes) -> None: + super().__init__() + self._container: typing.MutableMapping[str, list[tuple[str, str | bytes]] | str] = {} - If the given `value` is `None`, then all the previous - values will be overwritten and the value will be set - to `None`. 
- """ - if value is None: - self[key] = value + if d is not None: + self.update(d) + + for key, value in kwargs.items(): + self.add(key, value) + + def items(self) -> typing.Iterator[str, str | bytes | None]: + for key_i in self._container: + + if isinstance(self._container[key_i], str): + yield key_i, None + continue + + for original_key, value in self._container[key_i]: + yield original_key, value + + def keys(self) -> BaseMultiDictKeyView: + return BaseMultiDictKeyView(self) + + def copy(self: T) -> T: + return BaseMultiDict(self) + + def __delitem__(self, __key: str) -> None: + del self._container[__key.lower()] + + def __len__(self) -> int: + return len(self._container) + + def __iter__(self) -> typing.Iterator[str]: + for key_i in self._container: + if isinstance(self._container[key_i], list): + yield self._container[key_i][0][0] + else: + yield self._container[key_i] + + def __contains__(self, item: str) -> bool: + return item.lower() in self._container + + def update(self, __m, **kwargs) -> None: + if hasattr(__m, "items"): + for k in __m: + self[k] = None + for k, v in __m.items(): + self.add(k, v) + else: + for k, v in __m: + self.add(k, v) + + def getlist(self, key: str) -> list[str | bytes]: + key_lower = key.lower() + values = self._container[key_lower] + + if isinstance(values, str): + return [] + + return [_[-1] for _ in self._container[key_lower]] + + def __setitem__(self, key: str | bytes, val: str | bytes | None) -> None: + if isinstance(key, bytes): + key = key.decode("latin-1") + if val is not None: + self._container[key.lower()] = [(key, val,)] + else: + self._container[key.lower()] = key + + def __getitem__(self, key: str) -> str | None: + values = self._container[key.lower()] + if isinstance(values, str): return None + return ",".join([_[-1].decode() if isinstance(_[-1], bytes) else _[-1] for _ in values]) + + def popone(self, key: str) -> str | bytes: + key_lower = key.lower() + + val = self._container[key_lower].pop() + + if not 
self._container[key_lower]: + self._container[key_lower] = key + + return val[-1] + + def popall(self, key: str) -> list[str]: + key_lower = key.lower() + values = self._container[key_lower] + + self._container[key_lower] = values[0][0] + + return [_[-1] for _ in values] + + def add(self, key: str | bytes, val: str | bytes | None) -> None: + if isinstance(key, bytes): + key = key.decode("latin-1") - # If the previous value for the given header is `None` - # then discard it since we are explicitly giving a new - # value for it. - if key in self and self.getone(key) is None: - self.popone(key) + key_lower = key.lower() - super().add(key, value) + if val is None: + self._container[key_lower] = key + return - def remove_item(self, key, value): + if key_lower not in self._container or isinstance(self._container[key_lower], str): + self._container[key_lower] = [] + + self._container[key_lower].append((key, val,)) + + def remove_item(self, key: str, value: str | bytes) -> None: """ Remove a (key, value) pair from the dict. """ - existing_values = self.popall(key) - existing_values.remove(value) + key_lower = key.lower() - for value in existing_values: - self.add(key, value) + to_remove = None + + for k, v in self._container[key_lower]: + if (key == k or key == key_lower) and v == value: + to_remove = (k, v) + break + + if to_remove: + self._container[key_lower].remove(to_remove) + if not self._container[key_lower]: + del self._container[key_lower] + + +class HTTPHeadersDict(BaseMultiDict): + """ + Headers are case-insensitive and multiple values are supported + through the `add()` API. 
+ """ class RequestJSONDataDict(OrderedDict): diff --git a/httpie/cli/ports.py b/httpie/cli/ports.py new file mode 100644 index 0000000000..c482a6083a --- /dev/null +++ b/httpie/cli/ports.py @@ -0,0 +1,52 @@ +from __future__ import annotations + +import argparse +from random import randint +from typing import Tuple + + +MIN_PORT = 0 +MAX_PORT = 65535 +OUTSIDE_VALID_PORT_RANGE_ERROR = f'outside valid port range {MIN_PORT}-{MAX_PORT}' + + +def local_port_arg_type(port: str) -> int: + port = parse_local_port_arg(port) + if isinstance(port, tuple): + port = randint(*port) + return port + + +def parse_local_port_arg(port: str) -> int | Tuple[int, int]: + if '-' in port[1:]: # Don’t treat negative port as range. + return _clean_port_range(port) + return _clean_port(port) + + +def _clean_port_range(port_range: str) -> Tuple[int, int]: + """ + We allow two digits separated by a hyphen to represent a port range. + + The parsing is done so that even negative numbers get parsed correctly, allowing us to + give a more specific outside-range error message. 
+ + """ + sep_pos = port_range.find('-', 1) + start, end = port_range[:sep_pos], port_range[sep_pos + 1:] + start = _clean_port(start) + end = _clean_port(end) + if start > end: + raise argparse.ArgumentTypeError(f'{port_range!r} is not a valid port range') + return start, end + + +def _clean_port(port: str) -> int: + try: + port = int(port) + except ValueError: + raise argparse.ArgumentTypeError(f'{port!r} is not a number') + if not (MIN_PORT <= port <= MAX_PORT): + raise argparse.ArgumentTypeError( + f'{port!r} is {OUTSIDE_VALID_PORT_RANGE_ERROR}' + ) + return port diff --git a/httpie/client.py b/httpie/client.py index a1da284a7c..05949f4c81 100644 --- a/httpie/client.py +++ b/httpie/client.py @@ -1,19 +1,20 @@ +from __future__ import annotations + import argparse -import http.client import json import sys -from contextlib import contextmanager +import typing +from pathlib import Path from time import monotonic from typing import Any, Dict, Callable, Iterable from urllib.parse import urlparse, urlunparse +import ipaddress -import requests -# noinspection PyPackageRequirements -import urllib3 -from urllib3.util import SKIP_HEADER, SKIPPABLE_HEADERS +import niquests from . 
import __version__ from .adapters import HTTPieHTTPAdapter +from .compat import urllib3, SKIP_HEADER, SKIPPABLE_HEADERS, parse_url, Timeout from .cli.constants import HTTP_OPTIONS from .cli.dicts import HTTPHeadersDict from .cli.nested_json import unwrap_top_level_list_if_needed @@ -22,7 +23,7 @@ from .models import RequestsMessage from .plugins.registry import plugin_manager from .sessions import get_httpie_session -from .ssl_ import AVAILABLE_SSL_VERSION_ARG_MAPPING, HTTPieCertificate, HTTPieHTTPSAdapter +from .ssl_ import AVAILABLE_SSL_VERSION_ARG_MAPPING, HTTPieCertificate, HTTPieHTTPSAdapter, QuicCapabilityCache from .uploads import ( compress_request, prepare_request_body, get_multipart_data_and_content_type, @@ -44,6 +45,7 @@ def collect_messages( env: Environment, args: argparse.Namespace, request_body_read_callback: Callable[[bytes], None] = None, + request_or_response_callback: Callable[[niquests.PreparedRequest | niquests.Response], None] = None, ) -> Iterable[RequestsMessage]: httpie_session = None httpie_session_headers = None @@ -65,12 +67,47 @@ def collect_messages( ) send_kwargs = make_send_kwargs(args) send_kwargs_mergeable_from_env = make_send_kwargs_mergeable_from_env(args) + parsed_url = parse_url(args.url) + resolver = args.resolver or None + + # We want to make sure every ".localhost" host resolve to loopback + if parsed_url.host and parsed_url.host.endswith(".localhost"): + ensure_resolver = f"in-memory://default/?hosts={parsed_url.host}:127.0.0.1&hosts={parsed_url.host}:[::1]" + if resolver and isinstance(resolver, list): + resolver.append(ensure_resolver) + else: + resolver = [ensure_resolver, "system://"] + requests_session = build_requests_session( ssl_version=args.ssl_version, ciphers=args.ciphers, - verify=bool(send_kwargs_mergeable_from_env['verify']) + verify=bool(send_kwargs_mergeable_from_env['verify']), + disable_http1=args.disable_http1, + disable_http2=args.disable_http2, + disable_http3=args.disable_http3, + resolver=resolver, + 
disable_ipv6=args.force_ipv4, + disable_ipv4=args.force_ipv6, + source_address=(args.interface, args.local_port), + quic_cache=env.config.quic_file, + happy_eyeballs=args.happy_eyeballs, ) + if not args.disable_http3 and args.force_http3: + requests_session.quic_cache_layer[(parsed_url.host, parsed_url.port or 443)] = (parsed_url.host, parsed_url.port or 443) + # well, this one is tricky. If we allow HTTP/3, and remote host was marked as QUIC capable + # but is not anymore, we may face an indefinite hang if timeout isn't set. This could surprise some user. + elif ( + not args.disable_http3 + and not args.force_http3 + and requests_session.quic_cache_layer.get((parsed_url.host, parsed_url.port or 443)) is not None + + ): + # we only set the connect timeout, the rest is still indefinite. + if send_kwargs["timeout"] is None: + send_kwargs["timeout"] = Timeout(connect=3) + setattr(args, "_failsafe_http3", True) + if httpie_session: httpie_session.update_headers(request_kwargs['headers']) requests_session.cookies = httpie_session.cookies @@ -88,7 +125,19 @@ def collect_messages( # TODO: reflect the split between request and send kwargs. dump_request(request_kwargs) - request = requests.Request(**request_kwargs) + hooks = None + + if request_or_response_callback: + # The hook set up bellow is crucial for HTTPie. + # It will help us yield the request before it is + # actually sent. This will permit us to know about + # the connection information for example. 
+ hooks = { + 'pre_send': [request_or_response_callback], + 'early_response': [request_or_response_callback], + } + + request = niquests.Request(**request_kwargs, hooks=hooks) prepared_request = requests_session.prepare_request(request) transform_headers(request, prepared_request) if args.path_as_is: @@ -110,12 +159,20 @@ def collect_messages( url=prepared_request.url, **send_kwargs_mergeable_from_env, ) - with max_headers(args.max_headers): - response = requests_session.send( - request=prepared_request, - **send_kwargs_merged, - **send_kwargs, - ) + response = requests_session.send( + request=prepared_request, + **send_kwargs_merged, + **send_kwargs, + ) + if args.max_headers and len(response.headers) > args.max_headers: + try: + requests_session.close() + # we consume the content to allow the connection to be put back into the pool, and closed! + response.content + except NotImplementedError: # We allow custom transports that may not implement close. + pass + + raise niquests.ConnectionError(f"got more than {args.max_headers} headers") response._httpie_headers_parsed_at = monotonic() expired_cookies += get_expired_cookies( response.headers.get('Set-Cookie', '') @@ -124,7 +181,7 @@ def collect_messages( response_count += 1 if response.next: if args.max_redirects and response_count == args.max_redirects: - raise requests.TooManyRedirects + raise niquests.TooManyRedirects if args.follow: prepared_request = response.next if args.all: @@ -139,29 +196,57 @@ def collect_messages( httpie_session.remove_cookies(expired_cookies) httpie_session.save() - -# noinspection PyProtectedMember -@contextmanager -def max_headers(limit): - # - # noinspection PyUnresolvedReferences - orig = http.client._MAXHEADERS - http.client._MAXHEADERS = limit or float('Inf') try: - yield - finally: - http.client._MAXHEADERS = orig + requests_session.close() + except NotImplementedError: # We allow custom transports that may not implement close. 
+ pass def build_requests_session( verify: bool, ssl_version: str = None, ciphers: str = None, -) -> requests.Session: - requests_session = requests.Session() + disable_http1: bool = False, + disable_http2: bool = False, + disable_http3: bool = False, + resolver: typing.List[str] = None, + disable_ipv4: bool = False, + disable_ipv6: bool = False, + source_address: typing.Tuple[str, int] = None, + quic_cache: typing.Optional[Path] = None, + happy_eyeballs: bool = False, +) -> niquests.Session: + requests_session = niquests.Session() + + if quic_cache is not None: + requests_session.quic_cache_layer = QuicCapabilityCache(quic_cache) + + if resolver: + resolver_rebuilt = [] + for r in resolver: + # assume it is the in-memory resolver + if "://" not in r: + if ":" not in r or r.count(':') != 1: + raise ValueError("The manual resolver for a specific host requires to be formatted like 'hostname:ip'. e.g. 'pie.dev:1.1.1.1'.") + hostname, override_ip = r.split(':') + if hostname.strip() == "" or override_ip.strip() == "": + raise ValueError("The manual resolver for a specific host requires to be formatted like 'hostname:ip'. e.g. 'pie.dev:1.1.1.1'.") + ipaddress.ip_address(override_ip) + r = f"in-memory://default/?hosts={r}" + resolver_rebuilt.append(r) + resolver = resolver_rebuilt # Install our adapter. 
- http_adapter = HTTPieHTTPAdapter() + http_adapter = HTTPieHTTPAdapter( + resolver=resolver, + disable_ipv4=disable_ipv4, + disable_ipv6=disable_ipv6, + source_address=source_address, + disable_http1=disable_http1, + disable_http2=disable_http2, + disable_http3=disable_http3, + happy_eyeballs=happy_eyeballs, + ) https_adapter = HTTPieHTTPSAdapter( ciphers=ciphers, verify=verify, @@ -169,6 +254,15 @@ def build_requests_session( AVAILABLE_SSL_VERSION_ARG_MAPPING[ssl_version] if ssl_version else None ), + disable_http1=disable_http1, + disable_http2=disable_http2, + disable_http3=disable_http3, + resolver=resolver, + disable_ipv4=disable_ipv4, + disable_ipv6=disable_ipv6, + source_address=source_address, + quic_cache_layer=requests_session.quic_cache_layer, + happy_eyeballs=happy_eyeballs, ) requests_session.mount('http://', http_adapter) requests_session.mount('https://', https_adapter) @@ -186,7 +280,7 @@ def build_requests_session( def dump_request(kwargs: dict): sys.stderr.write( - f'\n>>> requests.request(**{repr_dict(kwargs)})\n\n') + f'\n>>> niquests.request(**{repr_dict(kwargs)})\n\n') def finalize_headers(headers: HTTPHeadersDict) -> HTTPHeadersDict: @@ -210,13 +304,13 @@ def finalize_headers(headers: HTTPHeadersDict) -> HTTPHeadersDict: def transform_headers( - request: requests.Request, - prepared_request: requests.PreparedRequest + request: niquests.Request, + prepared_request: niquests.PreparedRequest ) -> None: """Apply various transformations on top of the `prepared_requests`'s headers to change the request prepreation behavior.""" - # Remove 'Content-Length' when it is misplaced by requests. + # Remove 'Content-Length' when it is misplaced by niquests. 
if ( prepared_request.method in IGNORE_CONTENT_LENGTH_METHODS and prepared_request.headers.get('Content-Length') == '0' @@ -232,7 +326,7 @@ def transform_headers( def apply_missing_repeated_headers( original_headers: HTTPHeadersDict, - prepared_request: requests.PreparedRequest + prepared_request: niquests.PreparedRequest ) -> None: """Update the given `prepared_request`'s headers with the original ones. This allows the requests to be prepared as usual, and then later @@ -290,12 +384,6 @@ def make_send_kwargs_mergeable_from_env(args: argparse.Namespace) -> dict: if args.cert: cert = args.cert if args.cert_key: - # Having a client certificate key passphrase is not supported - # by requests. So we are using our own transportation structure - # which is compatible with their format (a tuple of minimum two - # items). - # - # See: https://github.com/psf/requests/issues/2519 cert = HTTPieCertificate(cert, args.cert_key, args.cert_key_pass.value) return { @@ -329,7 +417,7 @@ def make_request_kwargs( request_body_read_callback=lambda chunk: chunk ) -> dict: """ - Translate our `args` into `requests.Request` keyword arguments. + Translate our `args` into `niquests.Request` keyword arguments. """ files = args.files diff --git a/httpie/compat.py b/httpie/compat.py index fcf167ca7d..a728e9d674 100644 --- a/httpie/compat.py +++ b/httpie/compat.py @@ -2,8 +2,49 @@ from typing import Any, Optional, Iterable from httpie.cookies import HTTPieCookiePolicy -from http import cookiejar # noqa +from http import cookiejar # noqa + +import niquests +from niquests._compat import HAS_LEGACY_URLLIB3 + + +# to understand why this is required +# see https://niquests.readthedocs.io/en/latest/community/faq.html#what-is-urllib3-future +# short story, urllib3 (import/top-level import) may be the legacy one https://github.com/urllib3/urllib3 +# instead of urllib3-future https://github.com/jawah/urllib3.future used by Niquests +# or only the secondary entry point could be available (e.g. 
urllib3_future on some distro without urllib3) +if not HAS_LEGACY_URLLIB3: + # noinspection PyPackageRequirements + import urllib3 # noqa: F401 + from urllib3.util import SKIP_HEADER, SKIPPABLE_HEADERS, parse_url, Timeout # noqa: F401 + from urllib3.fields import RequestField, format_header_param_rfc2231 # noqa: F401 + from urllib3.util.ssl_ import ( # noqa: F401 + create_urllib3_context, + resolve_ssl_version, + ) +else: + # noinspection PyPackageRequirements + import urllib3_future as urllib3 # noqa: F401 + from urllib3_future.util import SKIP_HEADER, SKIPPABLE_HEADERS, parse_url, Timeout # noqa: F401 + from urllib3_future.fields import RequestField, format_header_param_rfc2231 # noqa: F401 + from urllib3_future.util.ssl_ import ( # noqa: F401 + create_urllib3_context, + resolve_ssl_version, + ) + +# Importlib_metadata was a provisional module, so the APIs changed quite a few times +# between 3.8-3.10. It was also not included in the standard library until 3.8, so +# we install the backport for <3.8. +if sys.version_info >= (3, 8): + import importlib.metadata as importlib_metadata +else: + import importlib_metadata +is_windows = 'win32' in str(sys.platform).lower() +is_frozen = getattr(sys, 'frozen', False) + +MIN_SUPPORTED_PY_VERSION = (3, 7) +MAX_SUPPORTED_PY_VERSION = (3, 11) # Request does not carry the original policy attached to the # cookie jar, so until it is resolved we change the global cookie @@ -11,11 +52,26 @@ cookiejar.DefaultCookiePolicy = HTTPieCookiePolicy -is_windows = 'win32' in str(sys.platform).lower() -is_frozen = getattr(sys, 'frozen', False) +def has_ipv6_support(new_value: Optional[bool] = None) -> bool: + if new_value is not None: + # Allow overriding the default value for testing purposes. + urllib3.util.connection.HAS_IPV6 = new_value + return urllib3.util.connection.HAS_IPV6 + + +def enforce_niquests(): + """ + Force imported 3rd-party plugins to use `niquests` instead of `requests` if they haven’t migrated yet. 
+ + It’s a drop-in replacement for Requests so such plugins might continue to work unless they touch internals. + + """ + sys.modules["requests"] = niquests + sys.modules["requests.adapters"] = niquests.adapters + sys.modules["requests.sessions"] = niquests.sessions + sys.modules["requests.exceptions"] = niquests.exceptions + sys.modules["requests.packages.urllib3"] = urllib3 -MIN_SUPPORTED_PY_VERSION = (3, 7) -MAX_SUPPORTED_PY_VERSION = (3, 11) try: from functools import cached_property @@ -67,16 +123,6 @@ def __get__(self, instance, cls=None): return res -# importlib_metadata was a provisional module, so the APIs changed quite a few times -# between 3.8-3.10. It was also not included in the standard library until 3.8, so -# we install the backport for <3.8. - -if sys.version_info >= (3, 8): - import importlib.metadata as importlib_metadata -else: - import importlib_metadata - - def find_entry_points(entry_points: Any, group: str) -> Iterable[importlib_metadata.EntryPoint]: if hasattr(entry_points, "select"): # Python 3.10+ / importlib_metadata >= 3.9.0 return entry_points.select(group=group) diff --git a/httpie/config.py b/httpie/config.py index 27bc0a784d..34ae72ca4c 100644 --- a/httpie/config.py +++ b/httpie/config.py @@ -143,13 +143,17 @@ class Config(BaseConfigDict): def __init__(self, directory: Union[str, Path] = DEFAULT_CONFIG_DIR): self.directory = Path(directory) super().__init__(path=self.directory / self.FILENAME) + # this one ensure we do not init HTTPie without the proper config directory + # there's an issue where the fetch_update daemon run without having the directory present. that induce a + # loop trying to fetch latest versions information. 
+ self.ensure_directory() self.update(self.DEFAULTS) @property def default_options(self) -> list: return self['default_options'] - def _configured_path(self, config_option: str, default: str) -> None: + def _configured_path(self, config_option: str, default: str) -> Path: return Path( self.get(config_option, self.directory / default) ).expanduser().resolve() @@ -162,6 +166,10 @@ def plugins_dir(self) -> Path: def version_info_file(self) -> Path: return self._configured_path('version_info_file', 'version_info.json') + @property + def quic_file(self) -> Path: + return self._configured_path('quic_file', 'quic.json') + @property def developer_mode(self) -> bool: """This is a special setting for the development environment. It is diff --git a/httpie/context.py b/httpie/context.py index 2a54f46916..b853339963 100644 --- a/httpie/context.py +++ b/httpie/context.py @@ -99,8 +99,9 @@ def __init__(self, devnull=None, **kwargs): assert all(hasattr(type(self), attr) for attr in kwargs.keys()) self.__dict__.update(**kwargs) - # The original STDERR unaffected by --quiet’ing. + # The original STDERR/STDOUT unaffected by --quiet’ing. self._orig_stderr = self.stderr + self._orig_stdout = self.stdout self._devnull = devnull # Keyword arguments > stream.encoding > default UTF-8 diff --git a/httpie/core.py b/httpie/core.py index d0c26dcbcc..ccd4477967 100644 --- a/httpie/core.py +++ b/httpie/core.py @@ -3,13 +3,15 @@ import platform import sys import socket +from time import monotonic from typing import List, Optional, Union, Callable -import requests +import niquests from pygments import __version__ as pygments_version -from requests import __version__ as requests_version +from niquests import __version__ as requests_version from . 
import __version__ as httpie_version +from .cli.argparser import HTTPieArgumentParser from .cli.constants import OUT_REQ_BODY from .cli.nested_json import NestedJSONSyntaxError from .client import collect_messages @@ -30,7 +32,7 @@ # noinspection PyDefaultArgument def raw_main( - parser: argparse.ArgumentParser, + parser: HTTPieArgumentParser, main_program: Callable[[argparse.Namespace, Environment], ExitStatus], args: List[Union[str, bytes]] = sys.argv, env: Environment = Environment(), @@ -97,10 +99,7 @@ def handle_generic_error(e, annotation=None): else: check_updates(env) try: - exit_status = main_program( - args=parsed_args, - env=env, - ) + exit_status = main_program(parsed_args, env) except KeyboardInterrupt: env.stderr.write('\n') if include_traceback: @@ -112,16 +111,23 @@ def handle_generic_error(e, annotation=None): if include_traceback: raise exit_status = ExitStatus.ERROR - except requests.Timeout: + except niquests.Timeout: exit_status = ExitStatus.ERROR_TIMEOUT - env.log_error(f'Request timed out ({parsed_args.timeout}s).') - except requests.TooManyRedirects: + # this detects if we tried to connect with HTTP/3 when the remote isn't compatible anymore. + if hasattr(parsed_args, "_failsafe_http3"): + env.log_error( + f'Unable to connect. Was the remote specified HTTP/3 compatible but is not anymore? ' + f'Remove "{env.config.quic_file}" to clear it out. Or set --disable-http3 flag.' + ) + else: + env.log_error(f'Request timed out ({parsed_args.timeout}s).') + except niquests.TooManyRedirects: exit_status = ExitStatus.ERROR_TOO_MANY_REDIRECTS env.log_error( f'Too many redirects' f' (--max-redirects={parsed_args.max_redirects}).' 
) - except requests.exceptions.ConnectionError as exc: + except niquests.exceptions.ConnectionError as exc: annotation = None original_exc = unwrap_context(exc) if isinstance(original_exc, socket.gaierror): @@ -143,6 +149,7 @@ def handle_generic_error(e, annotation=None): return exit_status +# noinspection PyDefaultArgument def main( args: List[Union[str, bytes]] = sys.argv, env: Environment = Environment() @@ -175,8 +182,8 @@ def program(args: argparse.Namespace, env: Environment) -> ExitStatus: # TODO: Refactor and drastically simplify, especially so that the separator logic is elsewhere. exit_status = ExitStatus.SUCCESS downloader = None - initial_request: Optional[requests.PreparedRequest] = None - final_response: Optional[requests.Response] = None + initial_request: Optional[niquests.PreparedRequest] = None + final_response: Optional[niquests.Response] = None processing_options = ProcessingOptions.from_raw_args(args) def separate(): @@ -204,8 +211,42 @@ def request_body_read_callback(chunk: bytes): args.follow = True # --download implies --follow. 
downloader = Downloader(env, output_file=args.output_file, resume=args.download_resume) downloader.pre_request(args.headers) - messages = collect_messages(env, args=args, - request_body_read_callback=request_body_read_callback) + + def request_or_response_callback(delayed_message): + """This callback is called in two scenario: + + (i) just after initializing a connection to remote host + (ii) an early response has been received (1xx responses)""" + + oo = OutputOptions.from_message( + delayed_message, + args.output_options + ) + + if hasattr(delayed_message, "body"): + oo = oo._replace( + body=isinstance(delayed_message.body, (str, bytes)) and (args.verbose or oo.body) + ) + else: + delayed_message._httpie_headers_parsed_at = monotonic() + + write_message( + requests_message=delayed_message, + env=env, + output_options=oo, + processing_options=processing_options + ) + + if oo.body > 1: + separate() + + messages = collect_messages( + env, + args=args, + request_body_read_callback=request_body_read_callback, + request_or_response_callback=request_or_response_callback + ) + force_separator = False prev_with_body = False @@ -225,6 +266,13 @@ def request_body_read_callback(chunk: bytes): is_streamed_upload = not isinstance(message.body, (str, bytes)) do_write_body = not is_streamed_upload force_separator = is_streamed_upload and env.stdout_isatty + # We're in a REQUEST message, we rather output the message + # in prepared_request_readiness because we want "message.conn_info" + # to be set appropriately. (e.g. know about HTTP protocol version, etc...) + if message.conn_info is None and not args.offline: + # bellow variable will be accessed by prepared_request_readiness just after. 
+ prev_with_body = output_options.body + continue else: final_response = message if args.check_status or downloader: @@ -252,7 +300,7 @@ def request_body_read_callback(chunk: bytes): ) write_stream(stream=download_stream, outfile=download_to, flush=False) downloader.finish() - if downloader.interrupted: + if downloader.is_interrupted: exit_status = ExitStatus.ERROR env.log_error( f'Incomplete download: size={downloader.status.total_size};' @@ -261,6 +309,11 @@ def request_body_read_callback(chunk: bytes): return exit_status finally: + if args.data and hasattr(args.data, "close"): + args.data.close() + if args.files and hasattr(args.files, "items"): + for fd in args.files.items(): + fd[1][1].close() if downloader and not downloader.finished: downloader.failed() if args.output_file and args.output_file_specified: @@ -270,7 +323,7 @@ def request_body_read_callback(chunk: bytes): def print_debug_info(env: Environment): env.stderr.writelines([ f'HTTPie {httpie_version}\n', - f'Requests {requests_version}\n', + f'Niquests {requests_version}\n', f'Pygments {pygments_version}\n', f'Python {sys.version}\n{sys.executable}\n', f'{platform.system()} {platform.release()}', diff --git a/httpie/downloads.py b/httpie/downloads.py index 9c4b895e6f..0a8bccd22f 100644 --- a/httpie/downloads.py +++ b/httpie/downloads.py @@ -7,14 +7,15 @@ import re from mailbox import Message from time import monotonic -from typing import IO, Optional, Tuple +from typing import IO, Optional, Tuple, List, Union from urllib.parse import urlsplit -import requests +import niquests from .models import HTTPResponse, OutputOptions from .output.streams import RawStream from .context import Environment +from .utils import split_header_values PARTIAL_CONTENT = 206 @@ -159,6 +160,28 @@ def get_unique_filename(filename: str, exists=os.path.exists) -> str: attempt += 1 +def get_content_length(response: niquests.Response) -> Optional[int]: + try: + return int(response.headers['Content-Length']) + except (KeyError, 
ValueError, TypeError): + pass + + +def get_decodeable_content_encodings(encoded_response: niquests.Response) -> Optional[List[str]]: + content_encoding = encoded_response.headers.get('Content-Encoding') + if not content_encoding: + return None + applied_encodings = split_header_values(content_encoding) + try: + supported_decoders = encoded_response.raw.CONTENT_DECODERS + except AttributeError: + supported_decoders = ['gzip', 'deflate'] + for encoding in applied_encodings: + if encoding not in supported_decoders: + return None + return applied_encodings + + class Downloader: def __init__( @@ -174,11 +197,10 @@ def __init__( :param output_file: The file to store response body in. If not provided, it will be guessed from the response. - :param progress_file: Where to report download progress. - """ self.finished = False self.status = DownloadStatus(env=env) + self._output_file_created = False self._output_file = output_file self._resume = resume self._resumed_from = 0 @@ -189,8 +211,6 @@ def pre_request(self, request_headers: dict): Might alter `request_headers`. """ - # Ask the server not to encode the content so that we can resume, etc. - request_headers['Accept-Encoding'] = 'identity' if self._resume: bytes_have = os.path.getsize(self._output_file.name) if bytes_have: @@ -202,7 +222,7 @@ def pre_request(self, request_headers: dict): def start( self, initial_url: str, - final_response: requests.Response + final_response: niquests.Response ) -> Tuple[RawStream, IO]: """ Initiate and return a stream for `response` body with progress @@ -216,18 +236,18 @@ def start( """ assert not self.status.time_started - # FIXME: some servers still might sent Content-Encoding: gzip - # - try: - total_size = int(final_response.headers['Content-Length']) - except (KeyError, ValueError, TypeError): - total_size = None + # Even though we specify `Accept-Encoding: identity`, the server might still encode the response. 
+ # In such cases, the reported size will be of the decoded content, not the downloaded bytes. + # This is a limitation of the underlying Niquests library . + decoded_from = get_decodeable_content_encodings(final_response) + total_size = get_content_length(final_response) if not self._output_file: self._output_file = self._get_output_file_from_response( initial_url=initial_url, final_response=final_response, ) + self._output_file_created = True else: # `--output, -o` provided if self._resume and final_response.status_code == PARTIAL_CONTENT: @@ -254,7 +274,8 @@ def start( self.status.started( output_file=self._output_file, resumed_from=self._resumed_from, - total_size=total_size + total_size=total_size, + decoded_from=decoded_from, ) return stream, self._output_file @@ -263,32 +284,37 @@ def finish(self): assert not self.finished self.finished = True self.status.finished() + self._cleanup() def failed(self): self.status.terminate() + self._cleanup() + + def _cleanup(self): + if self._output_file_created: + self._output_file.close() @property - def interrupted(self) -> bool: - return ( - self.finished - and self.status.total_size - and self.status.total_size != self.status.downloaded - ) + def is_interrupted(self) -> bool: + return self.status.is_interrupted - def chunk_downloaded(self, chunk: bytes): + def chunk_downloaded(self, chunk_or_new_total: Union[bytes, int]) -> None: """ A download progress callback. - :param chunk: A chunk of response body data that has just + :param chunk_or_new_total: A chunk of response body data that has just been downloaded and written to the output. """ - self.status.chunk_downloaded(len(chunk)) + if isinstance(chunk_or_new_total, int): + self.status.set_total(chunk_or_new_total) + else: + self.status.chunk_downloaded(len(chunk_or_new_total)) @staticmethod def _get_output_file_from_response( initial_url: str, - final_response: requests.Response, + final_response: niquests.Response, ) -> IO: # Output file not specified. 
Pick a name that doesn't exist yet. filename = None @@ -304,6 +330,9 @@ def _get_output_file_from_response( return open(unique_filename, buffering=0, mode='a+b') +DECODED_FROM_SUFFIX = ' - decoded using {encodings}' + + class DownloadStatus: """Holds details about the download status.""" @@ -311,50 +340,78 @@ def __init__(self, env): self.env = env self.downloaded = 0 self.total_size = None + self.decoded_from = [] self.resumed_from = 0 self.time_started = None self.time_finished = None + self.display = None - def started(self, output_file, resumed_from=0, total_size=None): + def started(self, output_file, resumed_from=0, total_size=None, decoded_from: List[str] = None): assert self.time_started is None self.total_size = total_size + self.decoded_from = decoded_from self.downloaded = self.resumed_from = resumed_from self.time_started = monotonic() self.start_display(output_file=output_file) def start_display(self, output_file): from httpie.output.ui.rich_progress import ( - DummyDisplay, - StatusDisplay, - ProgressDisplay + DummyProgressDisplay, + ProgressDisplayNoTotal, + ProgressDisplayFull ) - message = f'Downloading to {output_file.name}' - if self.env.show_displays: - if self.total_size is None: - # Rich does not support progress bars without a total - # size given. Instead we use status objects. 
- self.display = StatusDisplay(self.env) - else: - self.display = ProgressDisplay(self.env) + summary_suffix = '' + + if self.decoded_from: + encodings = ', '.join(f'`{enc}`' for enc in self.decoded_from) + message_suffix = DECODED_FROM_SUFFIX.format(encodings=encodings) + else: + message_suffix = '' + + if not self.env.show_displays: + progress_display_class = DummyProgressDisplay else: - self.display = DummyDisplay(self.env) + has_reliable_total = self.total_size is not None - self.display.start( - total=self.total_size, - at=self.downloaded, - description=message + if has_reliable_total: + progress_display_class = ProgressDisplayFull + else: + progress_display_class = ProgressDisplayNoTotal + + self.display = progress_display_class( + env=self.env, + total_size=self.total_size, + resumed_from=self.resumed_from, + description=message + message_suffix, + summary_suffix=summary_suffix, ) + self.display.start() def chunk_downloaded(self, size): assert self.time_finished is None self.downloaded += size self.display.update(size) + def set_total(self, total: int) -> None: + assert self.time_finished is None + prev_value = self.downloaded + self.downloaded = total + self.display.update(total - prev_value) + @property def has_finished(self): return self.time_finished is not None + @property + def is_interrupted(self): + return ( + self.has_finished + and self.total_size is not None + and not self.decoded_from + and self.total_size != self.downloaded + ) + @property def time_spent(self): if ( @@ -369,9 +426,9 @@ def finished(self): assert self.time_started is not None assert self.time_finished is None self.time_finished = monotonic() - if hasattr(self, 'display'): + if self.display: self.display.stop(self.time_spent) def terminate(self): - if hasattr(self, 'display'): + if self.display: self.display.stop(self.time_spent) diff --git a/httpie/internal/daemons.py b/httpie/internal/daemons.py index 929f960ca0..4289ec5553 100644 --- a/httpie/internal/daemons.py +++ 
b/httpie/internal/daemons.py @@ -109,8 +109,8 @@ def _spawn(args: List[str], process_context: ProcessContext) -> None: _spawn_posix(args, process_context) -def spawn_daemon(task: str) -> None: - args = [task, '--daemon'] +def spawn_daemon(task: str, *args: str) -> None: + args = [task, '--daemon', *args] process_context = os.environ.copy() if not is_frozen: file_path = os.path.abspath(inspect.stack()[0][1]) diff --git a/httpie/internal/encoder.py b/httpie/internal/encoder.py new file mode 100644 index 0000000000..3a8d73657b --- /dev/null +++ b/httpie/internal/encoder.py @@ -0,0 +1,476 @@ +""" +This program is part of the requests_toolbelt package. + +Copyright 2014 Ian Cordasco, Cory Benfield + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + https://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +""" + +import contextlib +import io +import os +from uuid import uuid4 + +from ..compat import RequestField, format_header_param_rfc2231 + + +class MultipartEncoder(object): + """ + The ``MultipartEncoder`` object is a generic interface to the engine that + will create a ``multipart/form-data`` body for you. + + The basic usage is: + + .. code-block:: python + + import requests + from requests_toolbelt import MultipartEncoder + + encoder = MultipartEncoder({'field': 'value', + 'other_field': 'other_value'}) + r = requests.post('https://httpbin.org/post', data=encoder, + headers={'Content-Type': encoder.content_type}) + + If you do not need to take advantage of streaming the post body, you can + also do: + + .. 
code-block:: python + + r = requests.post('https://httpbin.org/post', + data=encoder.to_string(), + headers={'Content-Type': encoder.content_type}) + + If you want the encoder to use a specific order, you can use an + OrderedDict or more simply, a list of tuples: + + .. code-block:: python + + encoder = MultipartEncoder([('field', 'value'), + ('other_field', 'other_value')]) + + .. versionchanged:: 0.4.0 + + You can also provide tuples as part values as you would provide them to + requests' ``files`` parameter. + + .. code-block:: python + + encoder = MultipartEncoder({ + 'field': ('file_name', b'{"a": "b"}', 'application/json', + {'X-My-Header': 'my-value'}) + ]) + + .. warning:: + + This object will end up directly in :mod:`httplib`. Currently, + :mod:`httplib` has a hard-coded read size of **8192 bytes**. This + means that it will loop until the file has been read and your upload + could take a while. This is **not** a bug in requests. A feature is + being considered for this object to allow you, the user, to specify + what size should be returned on a read. If you have opinions on this, + please weigh in on `this issue`_. + + .. 
_this issue: + https://github.com/requests/toolbelt/issues/75 + + """ + + def __init__(self, fields, boundary=None, encoding='utf-8'): + #: Boundary value either passed in by the user or created + self.boundary_value = boundary or uuid4().hex + + # Computed boundary + self.boundary = '--{}'.format(self.boundary_value) + + #: Encoding of the data being passed in + self.encoding = encoding + + # Pre-encoded boundary + self._encoded_boundary = b''.join([ + self.boundary.encode(self.encoding), + '\r\n'.encode(self.encoding) + ]) + + #: Fields provided by the user + self.fields = fields + + #: Whether or not the encoder is finished + self.finished = False + + #: Pre-computed parts of the upload + self.parts = [] + + # Pre-computed parts iterator + self._iter_parts = iter([]) + + # The part we're currently working with + self._current_part = None + + # Cached computation of the body's length + self._len = None + + # Our buffer + self._buffer = CustomBytesIO(encoding=encoding) + + # Pre-compute each part's headers + self._prepare_parts() + + # Load boundary into buffer + self._write_boundary() + + @property + def len(self): + """Length of the multipart/form-data body. + + requests will first attempt to get the length of the body by calling + ``len(body)`` and then by checking for the ``len`` attribute. + + On 32-bit systems, the ``__len__`` method cannot return anything + larger than an integer (in C) can hold. If the total size of the body + is even slightly larger than 4GB users will see an OverflowError. This + manifested itself in `bug #80`_. + + As such, we now calculate the length lazily as a property. + + .. _bug #80: + https://github.com/requests/toolbelt/issues/80 + """ + # If _len isn't already calculated, calculate, return, and set it + return self._len or self._calculate_length() + + def __repr__(self): + return ''.format(self.fields) + + def _calculate_length(self): + """ + This uses the parts to calculate the length of the body. 
+ + This returns the calculated length so __len__ can be lazy. + """ + boundary_len = len(self.boundary) # Length of --{boundary} + # boundary length + header length + body length + len('\r\n') * 2 + + self._len = sum( + (boundary_len + total_len(p) + 4) for p in self.parts + ) + boundary_len + 4 + + return self._len + + def _calculate_load_amount(self, read_size): + """This calculates how many bytes need to be added to the buffer. + + When a consumer read's ``x`` from the buffer, there are two cases to + satisfy: + + 1. Enough data in the buffer to return the requested amount + 2. Not enough data + + This function uses the amount of unread bytes in the buffer and + determines how much the Encoder has to load before it can return the + requested amount of bytes. + + :param int read_size: the number of bytes the consumer requests + :returns: int -- the number of bytes that must be loaded into the + buffer before the read can be satisfied. This will be strictly + non-negative + """ + amount = read_size - total_len(self._buffer) + return amount if amount > 0 else 0 + + def _load(self, amount): + """Load ``amount`` number of bytes into the buffer.""" + self._buffer.smart_truncate() + part = self._current_part or self._next_part() + while amount == -1 or amount > 0: + written = 0 + if part and not part.bytes_left_to_write(): + written += self._write(b'\r\n') + written += self._write_boundary() + part = self._next_part() + + if not part: + written += self._write_closing_boundary() + self.finished = True + break + + written += part.write_to(self._buffer, amount) + + if amount != -1: + amount -= written + + def _next_part(self): + try: + if self._current_part is not None: + self._current_part.close() + p = self._current_part = next(self._iter_parts) + except StopIteration: + p = None + return p + + def _iter_fields(self): + _fields = self.fields + if hasattr(self.fields, 'items'): + _fields = list(self.fields.items()) + for k, v in _fields: + file_name = None + file_type = 
None + file_headers = None + if isinstance(v, (list, tuple)): + if len(v) == 2: + file_name, file_pointer = v + elif len(v) == 3: + file_name, file_pointer, file_type = v + else: + file_name, file_pointer, file_type, file_headers = v + else: + file_pointer = v + + field = RequestField( + name=k, + data=file_pointer, + filename=file_name, + headers=file_headers, + header_formatter=format_header_param_rfc2231 + ) + + field.make_multipart(content_type=file_type) + yield field + + def _prepare_parts(self): + """This uses the fields provided by the user and creates Part objects. + + It populates the `parts` attribute and uses that to create a + generator for iteration. + """ + enc = self.encoding + self.parts = [Part.from_field(f, enc) for f in self._iter_fields()] + self._iter_parts = iter(self.parts) + + def _write(self, bytes_to_write): + """Write the bytes to the end of the buffer. + + :param bytes bytes_to_write: byte-string (or bytearray) to append to + the buffer + :returns: int -- the number of bytes written + """ + return self._buffer.append(bytes_to_write) + + def _write_boundary(self): + """Write the boundary to the end of the buffer.""" + return self._write(self._encoded_boundary) + + def _write_closing_boundary(self): + """Write the bytes necessary to finish a multipart/form-data body.""" + with reset(self._buffer): + self._buffer.seek(-2, 2) + self._buffer.write(b'--\r\n') + return 2 + + def _write_headers(self, headers): + """Write the current part's headers to the buffer.""" + return self._write(headers.encode(self.encoding) if isinstance(headers, str) else headers) + + @property + def content_type(self): + return str( + 'multipart/form-data; boundary={}'.format(self.boundary_value) + ) + + def to_string(self): + """Return the entirety of the data in the encoder. + + .. note:: + + This simply reads all of the data it can. If you have started + streaming or reading data from the encoder, this method will only + return whatever data is left in the encoder. 
+ + .. note:: + + This method affects the internal state of the encoder. Calling + this method will exhaust the encoder. + + :returns: the multipart message + :rtype: bytes + """ + + return self.read() + + def read(self, size=-1): + """Read data from the streaming encoder. + + :param int size: (optional), If provided, ``read`` will return exactly + that many bytes. If it is not provided, it will return the + remaining bytes. + :returns: bytes + """ + if self.finished: + return self._buffer.read(size) + + bytes_to_load = size + if bytes_to_load != -1 and bytes_to_load is not None: + bytes_to_load = self._calculate_load_amount(int(size)) + + self._load(bytes_to_load) + return self._buffer.read(size) + + +class Part(object): + def __init__(self, headers, body): + self.headers = headers + self.body = body + self.headers_unread = True + self.len = len(self.headers) + total_len(self.body) + + def close(self): + if hasattr(self.body, "fd") and hasattr(self.body.fd, "close"): + self.body.fd.close() + elif hasattr(self.body, "close"): + self.body.close() + + @classmethod + def from_field(cls, field, encoding): + """Create a part from a Request Field generated by urllib3.""" + headers = field.render_headers().encode(encoding) + body = coerce_data(field.data, encoding) + return cls(headers, body) + + def bytes_left_to_write(self): + """Determine if there are bytes left to write. + + :returns: bool -- ``True`` if there are bytes left to write, otherwise + ``False`` + """ + to_read = 0 + if self.headers_unread: + to_read += len(self.headers) + + return (to_read + total_len(self.body)) > 0 + + def write_to(self, buffer, size): + """Write the requested amount of bytes to the buffer provided. + + The number of bytes written may exceed size on the first read since we + load the headers ambitiously. 
+ + :param CustomBytesIO buffer: buffer we want to write bytes to + :param int size: number of bytes requested to be written to the buffer + :returns: int -- number of bytes actually written + """ + written = 0 + if self.headers_unread: + written += buffer.append(self.headers) + self.headers_unread = False + + while total_len(self.body) > 0 and (size == -1 or written < size): + amount_to_read = size + if size != -1: + amount_to_read = size - written + written += buffer.append(self.body.read(amount_to_read)) + + return written + + +class CustomBytesIO(io.BytesIO): + def __init__(self, buffer=None, encoding='utf-8'): + buffer = buffer.encode(encoding) if buffer else b"" + super(CustomBytesIO, self).__init__(buffer) + + def _get_end(self): + current_pos = self.tell() + self.seek(0, 2) + length = self.tell() + self.seek(current_pos, 0) + return length + + @property + def len(self): + length = self._get_end() + return length - self.tell() + + def append(self, bytes): + with reset(self): + written = self.write(bytes) + return written + + def smart_truncate(self): + to_be_read = total_len(self) + already_read = self._get_end() - to_be_read + + if already_read >= to_be_read: + old_bytes = self.read() + self.seek(0, 0) + self.truncate() + self.write(old_bytes) + self.seek(0, 0) # We want to be at the beginning + + +class FileWrapper(object): + def __init__(self, file_object): + self.fd = file_object + + @property + def len(self): + return total_len(self.fd) - self.fd.tell() + + def read(self, length=-1): + return self.fd.read(length) + + +@contextlib.contextmanager +def reset(buffer): + """Keep track of the buffer's current position and write to the end. + + This is a context manager meant to be used when adding data to the buffer. + It eliminates the need for every function to be concerned with the + position of the cursor in the buffer. 
+ """ + original_position = buffer.tell() + buffer.seek(0, 2) + yield + buffer.seek(original_position, 0) + + +def coerce_data(data, encoding): + """Ensure that every object's __len__ behaves uniformly.""" + if not isinstance(data, CustomBytesIO): + if hasattr(data, 'getvalue'): + return CustomBytesIO(data.getvalue(), encoding) + + if hasattr(data, 'fileno'): + return FileWrapper(data) + + if not hasattr(data, 'read'): + return CustomBytesIO(data, encoding) + + return data + + +def total_len(o): + if hasattr(o, '__len__'): + return len(o) + + if hasattr(o, 'len'): + return o.len + + if hasattr(o, 'fileno'): + try: + fileno = o.fileno() + except io.UnsupportedOperation: + pass + else: + return os.fstat(fileno).st_size + + if hasattr(o, 'getvalue'): + # e.g. BytesIO, cStringIO.StringIO + return len(o.getvalue()) diff --git a/httpie/internal/update_warnings.py b/httpie/internal/update_warnings.py index a4b80d46b5..fb4df97d6c 100644 --- a/httpie/internal/update_warnings.py +++ b/httpie/internal/update_warnings.py @@ -4,7 +4,7 @@ from pathlib import Path from typing import Any, Optional, Callable -import requests +import niquests import httpie from httpie.context import Environment, LogLevel @@ -37,16 +37,34 @@ def _read_data_error_free(file: Path) -> Any: return {} -def _fetch_updates(env: Environment) -> str: +def _fetch_updates(env: Environment) -> None: file = env.config.version_info_file data = _read_data_error_free(file) - response = requests.get(PACKAGE_INDEX_LINK, verify=False) - response.raise_for_status() + try: + # HTTPie have a server that can return latest versions for various + # package channels, we shall attempt to retrieve this information once in a while + if hasattr(env.args, "verify"): + if env.args.verify.lower() in {"yes", "true", "no", "false"}: + verify = env.args.verify.lower() in {"yes", "true"} + else: + verify = env.args.verify + else: + verify = True + + response = niquests.get(PACKAGE_INDEX_LINK, verify=verify) + response.raise_for_status() 
+ versions = response.json() + except (niquests.exceptions.ConnectionError, niquests.exceptions.HTTPError): + # in case of an error, let's ignore to avoid looping indefinitely. + # (spawn daemon background task maybe_fetch_update) + versions = { + BUILD_CHANNEL: httpie.__version__ + } data.setdefault('last_warned_date', None) data['last_fetched_date'] = datetime.now().isoformat() - data['last_released_versions'] = response.json() + data['last_released_versions'] = versions with open_with_lockfile(file, 'w') as stream: json.dump(data, stream) @@ -54,7 +72,7 @@ def _fetch_updates(env: Environment) -> str: def fetch_updates(env: Environment, lazy: bool = True): if lazy: - spawn_daemon('fetch_updates') + spawn_daemon('fetch_updates', f'--verify={env.args.verify}') else: _fetch_updates(env) diff --git a/httpie/models.py b/httpie/models.py index a0a68c8ddc..f20363f588 100644 --- a/httpie/models.py +++ b/httpie/models.py @@ -1,7 +1,8 @@ from time import monotonic -import requests -from urllib3.util import SKIP_HEADER, SKIPPABLE_HEADERS +import niquests + +from kiss_headers.utils import prettify_header_name from enum import Enum, auto from typing import Iterable, Union, NamedTuple @@ -14,10 +15,14 @@ OUT_RESP_HEAD, OUT_RESP_META ) -from .compat import cached_property +from .compat import urllib3, SKIP_HEADER, SKIPPABLE_HEADERS, cached_property from .utils import split_cookies, parse_content_type_header ELAPSED_TIME_LABEL = 'Elapsed time' +ELAPSED_DNS_RESOLUTION_LABEL = 'Elapsed DNS' +ELAPSED_TLS_HANDSHAKE = 'Elapsed TLS handshake' +ELAPSED_REQUEST_SEND = 'Elapsed emitting request' +ELAPSED_ESTABLISH_CONN = 'Elapsed established connection' class HTTPMessage: @@ -59,7 +64,7 @@ def content_type(self) -> str: class HTTPResponse(HTTPMessage): - """A :class:`requests.models.Response` wrapper.""" + """A :class:`niquests.models.Response` wrapper.""" def iter_body(self, chunk_size=1): return self._orig.iter_content(chunk_size=chunk_size) @@ -70,18 +75,19 @@ def iter_lines(self, 
chunk_size): @property def headers(self): original = self._orig + http_headers = original.raw.headers if original.raw and hasattr(original.raw, "headers") else original.headers status_line = f'HTTP/{self.version} {original.status_code} {original.reason}' headers = [status_line] headers.extend( - ': '.join(header) - for header in original.headers.items() - if header[0] != 'Set-Cookie' + ': '.join([prettify_header_name(header), value]) + for header, value in http_headers.items() + if header.lower() != 'set-cookie' ) headers.extend( f'Set-Cookie: {cookie}' - for header, value in original.headers.items() + for header, value in http_headers.items() for cookie in split_cookies(value) - if header == 'Set-Cookie' + if header.lower() == 'set-cookie' ) return '\r\n'.join(headers) @@ -89,12 +95,37 @@ def headers(self): def metadata(self) -> str: data = {} time_to_parse_headers = self._orig.elapsed.total_seconds() + # noinspection PyProtectedMember time_since_headers_parsed = monotonic() - self._orig._httpie_headers_parsed_at time_elapsed = time_to_parse_headers + time_since_headers_parsed - # data['Headers time'] = str(round(time_to_parse_headers, 5)) + 's' - # data['Body time'] = str(round(time_since_headers_parsed, 5)) + 's' - data[ELAPSED_TIME_LABEL] = str(round(time_elapsed, 10)) + 's' + + # metrics aren't guaranteed to be there. act with caution. + # see https://niquests.readthedocs.io/en/latest/user/advanced.html#event-hooks for more. 
+ if hasattr(self._orig, "conn_info") and self._orig.conn_info: + if self._orig.conn_info.resolution_latency is not None: + if self._orig.conn_info.resolution_latency: + data[ELAPSED_DNS_RESOLUTION_LABEL] = f"{round(self._orig.conn_info.resolution_latency.total_seconds(), 10):6f}s" + else: + data[ELAPSED_DNS_RESOLUTION_LABEL] = "0s" + if self._orig.conn_info.established_latency is not None: + if self._orig.conn_info.established_latency: + data[ELAPSED_ESTABLISH_CONN] = f"{round(self._orig.conn_info.established_latency.total_seconds(), 10):6f}s" + else: + data[ELAPSED_ESTABLISH_CONN] = "0s" + if self._orig.conn_info.tls_handshake_latency is not None: + if self._orig.conn_info.tls_handshake_latency: + data[ELAPSED_TLS_HANDSHAKE] = f"{round(self._orig.conn_info.tls_handshake_latency.total_seconds(), 10):6f}s" + else: + data[ELAPSED_TLS_HANDSHAKE] = "0s" + if self._orig.conn_info.request_sent_latency is not None: + if self._orig.conn_info.request_sent_latency: + data[ELAPSED_REQUEST_SEND] = f"{round(self._orig.conn_info.request_sent_latency.total_seconds(), 10):6f}s" + else: + data[ELAPSED_REQUEST_SEND] = "0s" + + data[ELAPSED_TIME_LABEL] = f"{round(time_elapsed, 10):6f}s" + return '\n'.join( f'{key}: {value}' for key, value in data.items() @@ -108,27 +139,11 @@ def version(self) -> str: Assume HTTP/1.1 if version is not available. 
""" - mapping = { - 9: '0.9', - 10: '1.0', - 11: '1.1', - 20: '2.0', - } - fallback = 11 - version = None - try: - raw = self._orig.raw - if getattr(raw, '_original_response', None): - version = raw._original_response.version - else: - version = raw.version - except AttributeError: - pass - return mapping[version or fallback] + return self._orig.conn_info.http_version.value.replace("HTTP/", "").replace(".0", "") if self._orig.conn_info and self._orig.conn_info.http_version else "1.1" class HTTPRequest(HTTPMessage): - """A :class:`requests.models.Request` wrapper.""" + """A :class:`niquests.models.Request` wrapper.""" def iter_body(self, chunk_size): yield self.body @@ -136,14 +151,69 @@ def iter_body(self, chunk_size): def iter_lines(self, chunk_size): yield self.body, b'' + @property + def metadata(self) -> str: + conn_info: urllib3.ConnectionInfo = self._orig.conn_info + + metadatum = f"Connected to: {conn_info.destination_address[0]} port {conn_info.destination_address[1]}\n" + + if conn_info.certificate_dict: + metadatum += ( + f"Connection secured using: {conn_info.tls_version.name.replace('_', '.')} with {conn_info.cipher.replace('TLS_', '').replace('_', '-')}\n" + f"Server certificate: " + ) + + for entry in conn_info.certificate_dict['subject']: + if len(entry) == 2: + rdns, value = entry + elif len(entry) == 1: + rdns, value = entry[0] + else: + continue + + metadatum += f'{rdns}="{value}"; ' + + if "subjectAltName" in conn_info.certificate_dict: + for entry in conn_info.certificate_dict['subjectAltName']: + if len(entry) == 2: + rdns, value = entry + metadatum += f'{rdns}="{value}"; ' + + metadatum = metadatum[:-2] + "\n" + + metadatum += f'Certificate validity: "{conn_info.certificate_dict["notBefore"]}" to "{conn_info.certificate_dict["notAfter"]}"\n' + + if "issuer" in conn_info.certificate_dict: + metadatum += "Issuer: " + + for entry in conn_info.certificate_dict['issuer']: + if len(entry) == 2: + rdns, value = entry + elif len(entry) == 1: + rdns, 
value = entry[0] + else: + continue + + metadatum += f'{rdns}="{value}"; ' + + metadatum = metadatum[:-2] + "\n" + + if self._orig.ocsp_verified is None: + metadatum += "Revocation status: Unverified\n" + elif self._orig.ocsp_verified is True: + metadatum += "Revocation status: Good\n" + + return metadatum[:-1] + @property def headers(self): url = urlsplit(self._orig.url) - request_line = '{method} {path}{query} HTTP/1.1'.format( + request_line = '{method} {path}{query} {http_version}'.format( method=self._orig.method, path=url.path or '/', - query=f'?{url.query}' if url.query else '' + query=f'?{url.query}' if url.query else '', + http_version=self._orig.conn_info.http_version.value.replace(".0", "") if self._orig.conn_info and self._orig.conn_info.http_version else "HTTP/1.1" ) headers = self._orig.headers.copy() @@ -158,6 +228,7 @@ def headers(self): headers.insert(0, request_line) headers = '\r\n'.join(headers).strip() + return headers @property @@ -169,7 +240,7 @@ def body(self): return body or b'' -RequestsMessage = Union[requests.PreparedRequest, requests.Response] +RequestsMessage = Union[niquests.PreparedRequest, niquests.Response] class RequestsMessageKind(Enum): @@ -178,9 +249,9 @@ class RequestsMessageKind(Enum): def infer_requests_message_kind(message: RequestsMessage) -> RequestsMessageKind: - if isinstance(message, requests.PreparedRequest): + if isinstance(message, niquests.PreparedRequest): return RequestsMessageKind.REQUEST - elif isinstance(message, requests.Response): + elif isinstance(message, niquests.Response): return RequestsMessageKind.RESPONSE else: raise TypeError(f"Unexpected message type: {type(message).__name__}") @@ -190,6 +261,7 @@ def infer_requests_message_kind(message: RequestsMessage) -> RequestsMessageKind RequestsMessageKind.REQUEST: { 'headers': OUT_REQ_HEAD, 'body': OUT_REQ_BODY, + 'meta': OUT_RESP_META }, RequestsMessageKind.RESPONSE: { 'headers': OUT_RESP_HEAD, diff --git a/httpie/output/lexers/http.py 
b/httpie/output/lexers/http.py index aea827401e..728490115f 100644 --- a/httpie/output/lexers/http.py +++ b/httpie/output/lexers/http.py @@ -66,7 +66,7 @@ class SimplifiedHTTPLexer(pygments.lexer.RegexLexer): tokens = { 'root': [ # Request-Line - (r'([A-Z]+)( +)([^ ]+)( +)(HTTP)(/)(\d+\.\d+)', + (r'([A-Z]+)( +)([^ ]+)( +)(HTTP)(/)([0-9].?[0-9]?)', pygments.lexer.bygroups( request_method, pygments.token.Text, @@ -77,7 +77,7 @@ class SimplifiedHTTPLexer(pygments.lexer.RegexLexer): pygments.token.Number )), # Response Status-Line - (r'(HTTP)(/)(\d+\.\d+)( +)(.+)', + (r'(HTTP)(/)([0-9].?[0-9]?)( +)(.+)', pygments.lexer.bygroups( pygments.token.Keyword.Reserved, # 'HTTP' pygments.token.Operator, # '/' diff --git a/httpie/output/lexers/metadata.py b/httpie/output/lexers/metadata.py index fa68e45762..1d41a67446 100644 --- a/httpie/output/lexers/metadata.py +++ b/httpie/output/lexers/metadata.py @@ -1,6 +1,6 @@ import pygments -from httpie.models import ELAPSED_TIME_LABEL +from httpie.models import ELAPSED_TIME_LABEL, ELAPSED_DNS_RESOLUTION_LABEL, ELAPSED_TLS_HANDSHAKE, ELAPSED_REQUEST_SEND, ELAPSED_ESTABLISH_CONN from httpie.output.lexers.common import precise SPEED_TOKENS = { @@ -36,7 +36,7 @@ class MetadataLexer(pygments.lexer.RegexLexer): tokens = { 'root': [ ( - fr'({ELAPSED_TIME_LABEL})( *)(:)( *)(\d+\.\d+)(s)', pygments.lexer.bygroups( + fr'({ELAPSED_TIME_LABEL}|{ELAPSED_DNS_RESOLUTION_LABEL}|{ELAPSED_REQUEST_SEND}|{ELAPSED_TLS_HANDSHAKE}|{ELAPSED_ESTABLISH_CONN})( *)(:)( *)([\d]+[.\d]{{0,}})(s)', pygments.lexer.bygroups( pygments.token.Name.Decorator, # Name pygments.token.Text, pygments.token.Operator, # Colon diff --git a/httpie/output/streams.py b/httpie/output/streams.py index 811093808a..5c1171336a 100644 --- a/httpie/output/streams.py +++ b/httpie/output/streams.py @@ -5,7 +5,7 @@ from .processing import Conversion, Formatting from ..context import Environment from ..encoding import smart_decode, smart_encode, UTF8 -from ..models import HTTPMessage, 
OutputOptions +from ..models import HTTPMessage, OutputOptions, RequestsMessageKind from ..utils import parse_content_type_header @@ -62,6 +62,10 @@ def iter_body(self) -> Iterable[bytes]: def __iter__(self) -> Iterable[bytes]: """Return an iterator over `self.msg`.""" + if self.output_options.meta and self.output_options.kind is RequestsMessageKind.REQUEST: + yield self.get_metadata() + yield b'\n\n' + if self.output_options.headers: yield self.get_headers() yield b'\r\n\r\n' @@ -71,13 +75,26 @@ def iter_body(self) -> Iterable[bytes]: for chunk in self.iter_body(): yield chunk if self.on_body_chunk_downloaded: - self.on_body_chunk_downloaded(chunk) + # Niquests 3.7+ has a way to determine the "real" amt of raw data collected + # Useful when the remote server compresses the body. We use the "untouched" amt of data to determine + # the download speed. + if hasattr(self.msg, "_orig") and hasattr(self.msg._orig, "download_progress") and self.msg._orig.download_progress: + # this is plan A: using public interfaces! + self.on_body_chunk_downloaded(self.msg._orig.download_progress.total) + elif hasattr(self.msg, "_orig") and hasattr(self.msg._orig, "raw") and hasattr(self.msg._orig.raw, "_fp_bytes_read"): + # plan B, falling back on a private property that may disappear from urllib3-future... + # this case is mandatory due to how the mocking library works. it does not use any "socket" but 
+ self.on_body_chunk_downloaded(chunk) except DataSuppressedError as e: if self.output_options.headers: yield b'\n' yield e.message - if self.output_options.meta: + if self.output_options.meta and self.output_options.kind is RequestsMessageKind.RESPONSE: if self.output_options.body: yield b'\n\n' @@ -88,7 +105,7 @@ def __iter__(self) -> Iterable[bytes]: class RawStream(BaseStream): """The message is streamed in chunks with no processing.""" - CHUNK_SIZE = 1024 * 100 + CHUNK_SIZE = -1 # '-1' means that we want to receive chunks exactly as they arrive. CHUNK_SIZE_BY_LINE = 1 def __init__(self, chunk_size=CHUNK_SIZE, **kwargs): diff --git a/httpie/output/ui/rich_progress.py b/httpie/output/ui/rich_progress.py index d2cfd38c70..41a2893d53 100644 --- a/httpie/output/ui/rich_progress.py +++ b/httpie/output/ui/rich_progress.py @@ -3,24 +3,27 @@ from httpie.context import Environment + if TYPE_CHECKING: from rich.console import Console @dataclass -class BaseDisplay: +class BaseProgressDisplay: env: Environment + total_size: Optional[float] + resumed_from: int + description: str + summary_suffix: str - def start( - self, *, total: Optional[float], at: float, description: str - ) -> None: - ... + def start(self): + raise NotImplementedError - def update(self, steps: float) -> None: - ... + def update(self, steps: float): + raise NotImplementedError - def stop(self, time_spent: float) -> None: - ... 
+ def stop(self, time_spent: float): + raise NotImplementedError @property def console(self) -> 'Console': @@ -31,57 +34,58 @@ def _print_summary( self, is_finished: bool, observed_steps: int, time_spent: float ): from rich import filesize - if is_finished: verb = 'Done' else: verb = 'Interrupted' - total_size = filesize.decimal(observed_steps) + # noinspection PyTypeChecker avg_speed = filesize.decimal(observed_steps / time_spent) - minutes, seconds = divmod(time_spent, 60) hours, minutes = divmod(int(minutes), 60) if hours: total_time = f'{hours:d}:{minutes:02d}:{seconds:0.5f}' else: total_time = f'{minutes:02d}:{seconds:0.5f}' - self.console.print( - f'[progress.description]{verb}. {total_size} in {total_time} ({avg_speed}/s)' + f'[progress.description]{verb}. {total_size} in {total_time} ({avg_speed}/s){self.summary_suffix}' ) -class DummyDisplay(BaseDisplay): +class DummyProgressDisplay(BaseProgressDisplay): """ A dummy display object to be used when the progress bars, spinners etc. are disabled globally (or during tests). 
""" + def start(self): + pass + + def update(self, steps: float): + pass + + def stop(self, time_spent: float): + pass -class StatusDisplay(BaseDisplay): - def start( - self, *, total: Optional[float], at: float, description: str - ) -> None: - self.observed = at + +class ProgressDisplayNoTotal(BaseProgressDisplay): + observed = 0 + status = None + + def start(self) -> None: + self.observed = self.resumed_from self.description = ( - f'[progress.description]{description}[/progress.description]' + f'[progress.description]{self.description}[/progress.description]' ) - self.status = self.console.status(self.description, spinner='line') self.status.start() - def update(self, steps: float) -> None: + def update(self, steps: int) -> None: from rich import filesize - self.observed += steps - - observed_amount, observed_unit = filesize.decimal( - self.observed - ).split() - self.status.update( - status=f'{self.description} [progress.download]{observed_amount}/? {observed_unit}[/progress.download]' - ) + observed_amount, observed_unit = filesize.decimal(self.observed).split() + msg = f'{self.description} [progress.download]{observed_amount}/? 
{observed_unit}[/progress.download]' + self.status.update(status=msg) def stop(self, time_spent: float) -> None: self.status.stop() @@ -94,10 +98,11 @@ def stop(self, time_spent: float) -> None: ) -class ProgressDisplay(BaseDisplay): - def start( - self, *, total: Optional[float], at: float, description: str - ) -> None: +class ProgressDisplayFull(BaseProgressDisplay): + progress_bar = None + transfer_task = None + + def start(self) -> None: from rich.progress import ( Progress, BarColumn, @@ -105,9 +110,8 @@ def start( TimeRemainingColumn, TransferSpeedColumn, ) - - assert total is not None - self.console.print(f'[progress.description]{description}') + assert self.total_size is not None + self.console.print(f'[progress.description]{self.description}') self.progress_bar = Progress( '[', BarColumn(), @@ -123,7 +127,9 @@ def start( ) self.progress_bar.start() self.transfer_task = self.progress_bar.add_task( - description, completed=at, total=total + description=self.description, + completed=self.resumed_from, + total=self.total_size, ) def update(self, steps: float) -> None: diff --git a/httpie/output/writer.py b/httpie/output/writer.py index 4a2949bce2..4e4071cd83 100644 --- a/httpie/output/writer.py +++ b/httpie/output/writer.py @@ -1,5 +1,5 @@ import errno -import requests +import niquests from typing import Any, Dict, IO, Optional, TextIO, Tuple, Type, Union from ..cli.dicts import HTTPHeadersDict @@ -105,7 +105,7 @@ def write_raw_data( headers: Optional[HTTPHeadersDict] = None, stream_kwargs: Optional[Dict[str, Any]] = None ): - msg = requests.PreparedRequest() + msg = niquests.PreparedRequest() msg.is_body_upload_chunk = True msg.body = data msg.headers = headers or HTTPHeadersDict() diff --git a/httpie/plugins/base.py b/httpie/plugins/base.py index 1b44e5aec5..4e26242bc7 100644 --- a/httpie/plugins/base.py +++ b/httpie/plugins/base.py @@ -63,7 +63,7 @@ def get_auth(self, username: str = None, password: str = None): Use `self.raw_auth` to access the raw value 
passed through `--auth, -a`. - Return a ``requests.auth.AuthBase`` subclass instance. + Return a ``niquests.auth.AuthBase`` subclass instance. """ raise NotImplementedError() @@ -73,7 +73,7 @@ class TransportPlugin(BasePlugin): """ Requests transport adapter docs: - + See httpie-unixsocket for an example transport plugin: @@ -86,7 +86,7 @@ class TransportPlugin(BasePlugin): def get_adapter(self): """ - Return a ``requests.adapters.BaseAdapter`` subclass instance to be + Return a ``niquests.adapters.BaseAdapter`` subclass instance to be mounted to ``self.prefix``. """ diff --git a/httpie/plugins/builtin.py b/httpie/plugins/builtin.py index 860aebf7f9..ad79d0a53f 100644 --- a/httpie/plugins/builtin.py +++ b/httpie/plugins/builtin.py @@ -1,6 +1,6 @@ from base64 import b64encode -import requests.auth +import niquests.auth from .base import AuthPlugin @@ -10,12 +10,12 @@ class BuiltinAuthPlugin(AuthPlugin): package_name = '(builtin)' -class HTTPBasicAuth(requests.auth.HTTPBasicAuth): +class HTTPBasicAuth(niquests.auth.HTTPBasicAuth): def __call__( self, - request: requests.PreparedRequest - ) -> requests.PreparedRequest: + request: niquests.PreparedRequest + ) -> niquests.PreparedRequest: """ Override username/password serialization to allow unicode. 
@@ -34,12 +34,12 @@ def make_header(username: str, password: str) -> str: return f'Basic {token}' -class HTTPBearerAuth(requests.auth.AuthBase): +class HTTPBearerAuth(niquests.auth.AuthBase): def __init__(self, token: str) -> None: self.token = token - def __call__(self, request: requests.PreparedRequest) -> requests.PreparedRequest: + def __call__(self, request: niquests.PreparedRequest) -> niquests.PreparedRequest: request.headers['Authorization'] = f'Bearer {self.token}' return request @@ -64,8 +64,8 @@ def get_auth( self, username: str, password: str - ) -> requests.auth.HTTPDigestAuth: - return requests.auth.HTTPDigestAuth(username, password) + ) -> niquests.auth.HTTPDigestAuth: + return niquests.auth.HTTPDigestAuth(username, password) class BearerAuthPlugin(BuiltinAuthPlugin): @@ -75,5 +75,5 @@ class BearerAuthPlugin(BuiltinAuthPlugin): auth_parse = False # noinspection PyMethodOverriding - def get_auth(self, **kwargs) -> requests.auth.HTTPDigestAuth: + def get_auth(self, **kwargs) -> niquests.auth.HTTPDigestAuth: return HTTPBearerAuth(self.raw_auth) diff --git a/httpie/plugins/manager.py b/httpie/plugins/manager.py index 27af6eedac..04ca124ca7 100644 --- a/httpie/plugins/manager.py +++ b/httpie/plugins/manager.py @@ -1,18 +1,16 @@ -import sys import os +import sys import warnings - +from contextlib import contextmanager, nullcontext from itertools import groupby from operator import attrgetter -from typing import Dict, List, Type, Iterator, Iterable, Optional, ContextManager from pathlib import Path -from contextlib import contextmanager, nullcontext - -from ..compat import importlib_metadata, find_entry_points, get_dist_name +from typing import Dict, List, Type, Iterator, Iterable, Optional, ContextManager -from ..utils import repr_dict, get_site_paths from . 
import AuthPlugin, ConverterPlugin, FormatterPlugin, TransportPlugin from .base import BasePlugin +from ..compat import importlib_metadata, find_entry_points, get_dist_name, enforce_niquests +from ..utils import repr_dict, get_site_paths ENTRY_POINT_CLASSES = { @@ -64,6 +62,7 @@ def iter_entry_points(self, directory: Optional[Path] = None): yield from find_entry_points(eps, group=entry_point_name) def load_installed_plugins(self, directory: Optional[Path] = None): + enforce_niquests() for entry_point in self.iter_entry_points(directory): plugin_name = get_dist_name(entry_point) try: @@ -72,8 +71,8 @@ def load_installed_plugins(self, directory: Optional[Path] = None): warnings.warn( f'While loading "{plugin_name}", an error occurred: {exc}\n' f'For uninstallations, please use either "httpie plugins uninstall {plugin_name}" ' - f'or "pip uninstall {plugin_name}" (depending on how you installed it in the first ' - 'place).' + f'or "pip uninstall {plugin_name}" (depending on how you installed it in the first place). ' + 'The error might be related to HTTPie’s migration from `requests` to `niquests`.' 
) continue plugin.package_name = plugin_name diff --git a/httpie/sessions.py b/httpie/sessions.py index 99dcdba92e..5351959a9b 100644 --- a/httpie/sessions.py +++ b/httpie/sessions.py @@ -10,8 +10,8 @@ from pathlib import Path from typing import Any, Dict, List, Optional, Union -from requests.auth import AuthBase -from requests.cookies import RequestsCookieJar, remove_cookie_by_name +from niquests.auth import AuthBase +from niquests.cookies import RequestsCookieJar, remove_cookie_by_name from .context import Environment, LogLevel from .cookies import HTTPieCookiePolicy diff --git a/httpie/ssl_.py b/httpie/ssl_.py index af5ca548db..9dbd27005f 100644 --- a/httpie/ssl_.py +++ b/httpie/ssl_.py @@ -1,22 +1,24 @@ import ssl -from typing import NamedTuple, Optional +import typing +from pathlib import Path +from typing import NamedTuple, Optional, Tuple, MutableMapping +import json +import os.path from httpie.adapters import HTTPAdapter -# noinspection PyPackageRequirements -from urllib3.util.ssl_ import ( - create_urllib3_context, - resolve_ssl_version, -) - +from .compat import create_urllib3_context, resolve_ssl_version +# the minimum one may hope to negotiate with Python 3.7+ is tls1+ +# anything else would be unsupported. SSL_VERSION_ARG_MAPPING = { - 'ssl2.3': 'PROTOCOL_SSLv23', - 'ssl3': 'PROTOCOL_SSLv3', 'tls1': 'PROTOCOL_TLSv1', 'tls1.1': 'PROTOCOL_TLSv1_1', 'tls1.2': 'PROTOCOL_TLSv1_2', - 'tls1.3': 'PROTOCOL_TLSv1_3', + 'tls1.3': 'PROTOCOL_TLS_CLIENT', # CPython does not have a "PROTOCOL_TLSv1_3" constant, so, we'll improvise. } +# todo: we'll need to update this in preparation for Python 3.13+ +# could be a removal (after a long deprecation about constants +# PROTOCOL_TLSv1, PROTOCOL_TLSv1_1, ...). 
AVAILABLE_SSL_VERSION_ARG_MAPPING = { arg: getattr(ssl, constant_name) for arg, constant_name in SSL_VERSION_ARG_MAPPING.items() @@ -24,15 +26,70 @@ } +class QuicCapabilityCache( + MutableMapping[Tuple[str, int], Optional[Tuple[str, int]]] +): + """This class will help us keep (persistent across runs) what hosts are QUIC capable. + See https://urllib3future.readthedocs.io/en/latest/advanced-usage.html#remembering-http-3-over-quic-support for + the implementation guide.""" + + def __init__(self, path: Path): + self._path = path + self._cache = {} + if os.path.exists(path): + with open(path, "r") as fp: + try: + self._cache = json.load(fp) + except json.JSONDecodeError: # if the file is corrupted (invalid json) then, ignore it. + pass + + def save(self): + with open(self._path, "w") as fp: + json.dump(self._cache, fp) + + def __contains__(self, item: Tuple[str, int]): + return f"QUIC_{item[0]}_{item[1]}" in self._cache + + def __setitem__(self, key: Tuple[str, int], value: Optional[Tuple[str, int]]): + self._cache[f"QUIC_{key[0]}_{key[1]}"] = f"{value[0]}:{value[1]}" + self.save() + + def __getitem__(self, item: Tuple[str, int]): + key: str = f"QUIC_{item[0]}_{item[1]}" + if key in self._cache: + host, port = self._cache[key].split(":") + return host, int(port) + + return None + + def __delitem__(self, key: Tuple[str, int]): + key: str = f"QUIC_{key[0]}_{key[1]}" + if key in self._cache: + del self._cache[key] + self.save() + + def __len__(self): + return len(self._cache) + + def __iter__(self): + yield from self._cache.items() + + class HTTPieCertificate(NamedTuple): cert_file: Optional[str] = None key_file: Optional[str] = None key_password: Optional[str] = None - def to_raw_cert(self): - """Synthesize a requests-compatible (2-item tuple of cert and key file) + def to_raw_cert(self) -> typing.Union[ + typing.Tuple[typing.Optional[str], typing.Optional[str], typing.Optional[str]], # with password + typing.Tuple[typing.Optional[str], typing.Optional[str]] # without 
password + ]: + """Synthesize a niquests-compatible (2(or 3)-item tuple of cert, key file and optionally password) object from HTTPie's internal representation of a certificate.""" - return (self.cert_file, self.key_file) + if self.key_password: + # Niquests support 3-tuple repr in addition to the 2-tuple repr + return self.cert_file, self.key_file, self.key_password + return self.cert_file, self.key_file class HTTPieHTTPSAdapter(HTTPAdapter): @@ -43,24 +100,50 @@ def __init__( ciphers: str = None, **kwargs ): - self._ssl_context = self._create_ssl_context( - verify=verify, - ssl_version=ssl_version, - ciphers=ciphers, - ) + self._ssl_context = None + self._verify = None + + if ssl_version or ciphers: + # By default, almost all installed CPython have modern OpenSSL backends + # This actively prevent folks to negotiate "almost" dead TLS protocols + # HTTPie wants to help users when they explicitly expect "old" TLS support + # Common errors for user if not set: + # >- [SSL: NO_CIPHERS_AVAILABLE] no ciphers available + # >- [SSL: LEGACY_SIGALG_DISALLOWED_OR_UNSUPPORTED] legacy sigalg disallowed or unsupported + if ssl_version in {ssl.PROTOCOL_TLSv1, ssl.PROTOCOL_TLSv1_1} and ciphers is None: + # Please do not raise a "security" concern for that line. + # If the interpreter reach that line, it means that the user willingly set + # an unsafe TLS protocol. + ciphers = "DEFAULT:@SECLEVEL=0" + + # Only set the custom context if user supplied one. + # Because urllib3-future set his own secure ctx with a set of + # ciphers (moz recommended list). thus avoiding excluding QUIC + # in case some ciphers are accidentally excluded. 
+ self._ssl_context = self._create_ssl_context( + verify=verify, + ssl_version=ssl_version, + ciphers=ciphers, + ) + else: + self._verify = verify + super().__init__(**kwargs) def init_poolmanager(self, *args, **kwargs): kwargs['ssl_context'] = self._ssl_context + if self._verify is not None: + kwargs['cert_reqs'] = ssl.CERT_REQUIRED if self._verify else ssl.CERT_NONE return super().init_poolmanager(*args, **kwargs) def proxy_manager_for(self, *args, **kwargs): kwargs['ssl_context'] = self._ssl_context + if self._verify is not None: + kwargs['cert_reqs'] = ssl.CERT_REQUIRED if self._verify else ssl.CERT_NONE return super().proxy_manager_for(*args, **kwargs) def cert_verify(self, conn, url, verify, cert): if isinstance(cert, HTTPieCertificate): - conn.key_password = cert.key_password cert = cert.to_raw_cert() return super().cert_verify(conn, url, verify, cert) @@ -71,6 +154,19 @@ def _create_ssl_context( ssl_version: str = None, ciphers: str = None, ) -> 'ssl.SSLContext': + # HTTPie will take `ssl.PROTOCOL_TLS_CLIENT` as TLS 1.3 enforced! + # This piece of code is only triggered if user supplied --ssl=tls1.3 + if ssl_version is ssl.PROTOCOL_TLS_CLIENT: + return create_urllib3_context( + ciphers=ciphers, + ssl_minimum_version=ssl.TLSVersion.TLSv1_3, + ssl_maximum_version=ssl.TLSVersion.TLSv1_3, + # Since we are using a custom SSL context, we need to pass this + # here manually, even though it’s also passed to the connection + # in `super().cert_verify()`. 
+ cert_reqs=ssl.CERT_REQUIRED if verify else ssl.CERT_NONE + ) + return create_urllib3_context( ciphers=ciphers, ssl_version=resolve_ssl_version(ssl_version), diff --git a/httpie/uploads.py b/httpie/uploads.py index 4a993b3a25..3de4fd3716 100644 --- a/httpie/uploads.py +++ b/httpie/uploads.py @@ -3,18 +3,16 @@ import zlib import functools import threading -from typing import Any, Callable, IO, Iterable, Optional, Tuple, Union, TYPE_CHECKING +from typing import Any, Callable, IO, Iterable, Optional, Tuple, Union from urllib.parse import urlencode -import requests -from requests.utils import super_len - -if TYPE_CHECKING: - from requests_toolbelt import MultipartEncoder +import niquests +from niquests.utils import super_len from .context import Environment from .cli.dicts import MultipartRequestDataDict, RequestDataDict from .compat import is_windows +from .internal.encoder import MultipartEncoder class ChunkedStream: @@ -172,7 +170,6 @@ def _prepare_file_for_upload( ) if chunked: - from requests_toolbelt import MultipartEncoder if isinstance(file, MultipartEncoder): return ChunkedMultipartUploadStream( encoder=file, @@ -232,7 +229,6 @@ def get_multipart_data_and_content_type( boundary: str = None, content_type: str = None, ) -> Tuple['MultipartEncoder', str]: - from requests_toolbelt import MultipartEncoder encoder = MultipartEncoder( fields=data.items(), @@ -250,7 +246,7 @@ def get_multipart_data_and_content_type( def compress_request( - request: requests.PreparedRequest, + request: niquests.PreparedRequest, always: bool, ): deflater = zlib.compressobj() diff --git a/httpie/utils.py b/httpie/utils.py index 4735b2be5d..e021f1a349 100644 --- a/httpie/utils.py +++ b/httpie/utils.py @@ -16,7 +16,7 @@ from urllib.parse import urlsplit from typing import Any, List, Optional, Tuple, Generator, Callable, Iterable, IO, TypeVar -import requests.auth +import niquests.auth RE_COOKIE_SPLIT = re.compile(r', (?=[^ ;]+=)') Item = Tuple[str, Any] @@ -121,7 +121,7 @@ def 
humanize_bytes(n, precision=2): return f'{n / factor:.{precision}f} {suffix}' -class ExplicitNullAuth(requests.auth.AuthBase): +class ExplicitNullAuth(niquests.auth.AuthBase): """Forces requests to ignore the ``.netrc``. """ @@ -201,7 +201,7 @@ def _max_age_to_expires(cookies, now): def parse_content_type_header(header): - """Borrowed from requests.""" + """Borrowed from niquests.""" tokens = header.split(';') content_type, params = tokens[0].strip(), tokens[1:] params_dict = {} @@ -307,3 +307,7 @@ def split_version(version: str) -> Tuple[int, ...]: return tuple(parts) return split_version(version_1) > split_version(version_2) + + +def split_header_values(header: str) -> List[str]: + return [value.strip() for value in header.split(',')] diff --git a/pytest.ini b/pytest.ini deleted file mode 100644 index ced65979b1..0000000000 --- a/pytest.ini +++ /dev/null @@ -1,7 +0,0 @@ -[pytest] -markers = - # If you want to run tests without a full HTTPie installation - # we advise you to disable the markers below, e.g: - # pytest -m 'not requires_installation and not requires_external_processes' - requires_installation - requires_external_processes diff --git a/setup.cfg b/setup.cfg index 3766339326..647fdc4b42 100644 --- a/setup.cfg +++ b/setup.cfg @@ -11,6 +11,27 @@ testpaths = httpie tests norecursedirs = tests/fixtures addopts = --tb=native --doctest-modules --verbose xfail_strict = True +markers = + # If you want to run tests without a full HTTPie installation + # we advise you to disable the markers below, e.g: + # pytest -m 'not requires_installation and not requires_external_processes' + requires_installation + requires_external_processes +filterwarnings = + default + # due to urllib3.future no longer needing http.client! nothing to be concerned about. + ignore:Passing msg=\.\. is deprecated:DeprecationWarning + # this only concern the test suite / local test server with a self signed certificate. 
+ ignore:Unverified HTTPS request is being made to host:urllib3.exceptions.InsecureRequestWarning + # the constant themselves are deprecated in the ssl module, we want to silent them in the test suite until we + # change the concerned code. Python 3.13 may remove them, so we'll need to think about it soon. + ignore:ssl\.PROTOCOL_(TLSv1|TLSv1_1|TLSv1_2) is deprecated:DeprecationWarning + ignore:ssl\.TLSVersion\.(TLSv1|TLSv1_1|TLSv1_2) is deprecated:DeprecationWarning + # Happen in Windows. Oppose no threats to our test suite. + # "An operation was attempted on something that is not a socket" during shutdown + ignore:Exception in thread:pytest.PytestUnhandledThreadExceptionWarning + ignore:subprocess [0-9]+ is still running:ResourceWarning + ignore:This process \(pid=[0-9]+\) is multi\-threaded:DeprecationWarning [metadata] name = httpie @@ -50,10 +71,8 @@ install_requires = pip charset_normalizer>=2.0.0 defusedxml>=0.6.0 - requests[socks] >=2.22.0, <=2.31.0 + niquests[socks]>=3.9 Pygments>=2.5.2 - requests-toolbelt>=0.9.1 - multidict>=4.7.0 setuptools importlib-metadata>=1.4.0; python_version<"3.8" rich>=9.10.0 @@ -84,13 +103,12 @@ dev = pytest-httpbin>=0.0.6 responses pytest-mock - werkzeug<2.1.0 + werkzeug<4 flake8 flake8-comprehensions flake8-deprecated flake8-mutable flake8-tuple - pyopenssl pytest-cov pyyaml twine @@ -101,7 +119,7 @@ test = pytest-httpbin>=0.0.6 responses pytest-mock - werkzeug<2.1.0 + werkzeug<4 [options.data_files] share/man/man1 = diff --git a/tests/conftest.py b/tests/conftest.py index 551a636789..31ef202a61 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -8,7 +8,6 @@ HTTPBIN_WITH_CHUNKED_SUPPORT_DOMAIN, HTTPBIN_WITH_CHUNKED_SUPPORT, REMOTE_HTTPBIN_DOMAIN, - IS_PYOPENSSL, mock_env ) from .utils.plugins_cli import ( # noqa @@ -80,17 +79,8 @@ def remote_httpbin(_remote_httpbin_available): pytest.skip(f'{REMOTE_HTTPBIN_DOMAIN} not resolvable') -@pytest.fixture(autouse=True, scope='session') -def pyopenssl_inject(): - """ - Injects 
`pyOpenSSL` module to make sure `requests` will use it. - - """ - if IS_PYOPENSSL: - try: - import urllib3.contrib.pyopenssl - urllib3.contrib.pyopenssl.inject_into_urllib3() - except ModuleNotFoundError: - pytest.fail('Missing "pyopenssl" module.') - - yield +@pytest.fixture +def remote_httpbin_secure(_remote_httpbin_available): + if _remote_httpbin_available: + return 'https://' + REMOTE_HTTPBIN_DOMAIN + pytest.skip(f'{REMOTE_HTTPBIN_DOMAIN} not resolvable') diff --git a/tests/fixtures/__init__.py b/tests/fixtures/__init__.py index 6e6e73676e..69f55eb292 100644 --- a/tests/fixtures/__init__.py +++ b/tests/fixtures/__init__.py @@ -22,6 +22,7 @@ def patharg(path): JSON_FILE_PATH = FIXTURES_ROOT / 'test.json' JSON_WITH_DUPE_KEYS_FILE_PATH = FIXTURES_ROOT / 'test_with_dupe_keys.json' BIN_FILE_PATH = FIXTURES_ROOT / 'test.bin' +UTF8_IN_NAME_FILE_PATH = FIXTURES_ROOT / '天狗.txt' XML_FILES_PATH = FIXTURES_ROOT / 'xmldata' XML_FILES_VALID = list((XML_FILES_PATH / 'valid').glob('*_raw.xml')) diff --git "a/tests/fixtures/\345\244\251\347\213\227.txt" "b/tests/fixtures/\345\244\251\347\213\227.txt" new file mode 100644 index 0000000000..e69de29bb2 diff --git a/tests/test_auth.py b/tests/test_auth.py index 83423efec0..1b72f90043 100644 --- a/tests/test_auth.py +++ b/tests/test_auth.py @@ -92,9 +92,9 @@ def test_missing_auth(httpbin): def test_netrc(httpbin_both): - # This one gets handled by requests (no --auth, --auth-type present), - # that’s why we patch inside `requests.sessions`. - with mock.patch('requests.sessions.get_netrc_auth') as get_netrc_auth: + # This one gets handled by niquests (no --auth, --auth-type present), + # that’s why we patch inside `niquests.sessions`. 
+ with mock.patch('niquests.sessions.get_netrc_auth') as get_netrc_auth: get_netrc_auth.return_value = ('httpie', 'password') r = http(httpbin_both + '/basic-auth/httpie/password') assert get_netrc_auth.call_count == 1 @@ -106,7 +106,7 @@ def test_ignore_netrc(httpbin_both): get_netrc_auth.return_value = ('httpie', 'password') r = http('--ignore-netrc', httpbin_both + '/basic-auth/httpie/password') assert get_netrc_auth.call_count == 0 - assert 'HTTP/1.1 401 UNAUTHORIZED' in r + assert 'HTTP/1.1 401 Unauthorized' in r def test_ignore_netrc_together_with_auth(): diff --git a/tests/test_binary.py b/tests/test_binary.py index c5e63ec30e..97f7c30391 100644 --- a/tests/test_binary.py +++ b/tests/test_binary.py @@ -1,5 +1,5 @@ """Tests for dealing with binary request and response data.""" -import requests +import niquests from .fixtures import BIN_FILE_PATH, BIN_FILE_CONTENT, BIN_FILE_PATH_ARG from httpie.output.streams import BINARY_SUPPRESSED_NOTICE @@ -32,19 +32,22 @@ def test_binary_file_form(self, httpbin): class TestBinaryResponseData: + # Local httpbin crashes due to an unfixed bug — it is merged but not yet released. + # + # TODO: switch to the local `httpbin` fixture when the fix is released. 
- def test_binary_suppresses_when_terminal(self, httpbin): - r = http('GET', httpbin + '/bytes/1024?seed=1') + def test_binary_suppresses_when_terminal(self, remote_httpbin): + r = http('GET', remote_httpbin + '/bytes/1024?seed=1') assert BINARY_SUPPRESSED_NOTICE.decode() in r - def test_binary_suppresses_when_not_terminal_but_pretty(self, httpbin): + def test_binary_suppresses_when_not_terminal_but_pretty(self, remote_httpbin): env = MockEnvironment(stdin_isatty=True, stdout_isatty=False) - r = http('--pretty=all', 'GET', httpbin + '/bytes/1024?seed=1', env=env) + r = http('--pretty=all', 'GET', remote_httpbin + '/bytes/1024?seed=1', env=env) assert BINARY_SUPPRESSED_NOTICE.decode() in r - def test_binary_included_and_correct_when_suitable(self, httpbin): + def test_binary_included_and_correct_when_suitable(self, remote_httpbin): env = MockEnvironment(stdin_isatty=True, stdout_isatty=False) - url = httpbin + '/bytes/1024?seed=1' + url = remote_httpbin + '/bytes/1024?seed=1' r = http('GET', url, env=env) - expected = requests.get(url).content + expected = niquests.get(url).content assert r == expected diff --git a/tests/test_cli.py b/tests/test_cli.py index 2cd27574af..63c485d525 100644 --- a/tests/test_cli.py +++ b/tests/test_cli.py @@ -2,7 +2,7 @@ import argparse import pytest -from requests.exceptions import InvalidSchema +from niquests.exceptions import InvalidSchema, MissingSchema import httpie.cli.argparser from httpie.cli import constants @@ -51,6 +51,9 @@ def test_escape_separator(self): } assert 'bar@baz' in items.files + # ensure we close the fixture file + items.multipart_data['bar@baz'][1].close() + @pytest.mark.parametrize('string, key, sep, value', [ ('path=c:\\windows', 'path', '=', 'c:\\windows'), ('path=c:\\windows\\', 'path', '=', 'c:\\windows\\'), @@ -127,6 +130,8 @@ def test_valid_items(self): assert (items.files['file'][1].read().strip(). 
decode() == FILE_CONTENT) + items.files['file'][1].close() + def test_multiple_file_fields_with_same_field_name(self): items = RequestItems.from_args([ self.key_value_arg('file_field@' + FILE_PATH_ARG), @@ -134,6 +139,9 @@ def test_multiple_file_fields_with_same_field_name(self): ]) assert len(items.files['file_field']) == 2 + for md in items.multipart_data['file_field']: + md[1].close() + def test_multiple_text_fields_with_same_field_name(self): items = RequestItems.from_args( request_item_args=[ @@ -360,13 +368,13 @@ def test_invalid_custom_scheme(self): # InvalidSchema is expected because HTTPie # shouldn't touch a formally valid scheme. with pytest.raises(InvalidSchema): - http('foo+bar-BAZ.123://bah') + http('foo+bar://bah') def test_invalid_scheme_via_via_default_scheme(self): # InvalidSchema is expected because HTTPie # shouldn't touch a formally valid scheme. - with pytest.raises(InvalidSchema): - http('bah', '--default=scheme=foo+bar-BAZ.123') + with pytest.raises((InvalidSchema, MissingSchema,)): + http('bah', '--default=scheme=foo+bar') def test_default_scheme_option(self, httpbin_secure): url = f'{httpbin_secure.host}:{httpbin_secure.port}' diff --git a/tests/test_config.py b/tests/test_config.py index 1d2eea0750..d987b7eca9 100644 --- a/tests/test_config.py +++ b/tests/test_config.py @@ -1,3 +1,4 @@ +import os.path from pathlib import Path import pytest @@ -23,6 +24,20 @@ def test_default_options(httpbin): } +def test_config_dir_is_created(): + dir_path = str(get_default_config_dir()) + "--fake" + + try: + os.rmdir(dir_path) + except FileNotFoundError: + pass + + assert not os.path.exists(dir_path) + Config(dir_path) + assert os.path.exists(dir_path) + os.rmdir(dir_path) + + def test_config_file_not_valid(httpbin): env = MockEnvironment() env.create_temp_config_dir() diff --git a/tests/test_cookie.py b/tests/test_cookie.py index c2a9746509..3d0f96dfd5 100644 --- a/tests/test_cookie.py +++ b/tests/test_cookie.py @@ -16,9 +16,19 @@ def 
setup_mock_server(self, handler): # Start running mock server in a separate thread. # Daemon threads automatically shut down when the main process exits. self.mock_server_thread = Thread(target=self.mock_server.serve_forever) - self.mock_server_thread.setDaemon(True) + self.mock_server_thread.daemon = True self.mock_server_thread.start() + def shutdown_mock_server(self): + if self.mock_server is None: + return + self.mock_server.socket.close() + self.mock_server.shutdown() + self.mock_server_thread.join() + + self.mock_server = None + self.mock_server_port = None + def test_cookie_parser(self): """Not directly testing HTTPie but `requests` to ensure their cookies handling is still as expected by `get_expired_cookies()`. @@ -45,3 +55,4 @@ def do_GET(self): response = http(f'http://localhost:{self.mock_server_port}/') assert 'Set-Cookie: hello=world; Path=/' in response assert 'Set-Cookie: oatmeal_raisin="is the best"; Path=/' in response + self.shutdown_mock_server() diff --git a/tests/test_downloads.py b/tests/test_downloads.py index b646a0e6a5..63acdc7d6e 100644 --- a/tests/test_downloads.py +++ b/tests/test_downloads.py @@ -1,21 +1,28 @@ import os import tempfile import time -import requests +import zlib from unittest import mock from urllib.request import urlopen +import niquests import pytest -from requests.structures import CaseInsensitiveDict - +import responses from httpie.downloads import ( - parse_content_range, filename_from_content_disposition, filename_from_url, - get_unique_filename, ContentRangeError, Downloader, PARTIAL_CONTENT + parse_content_range, + filename_from_content_disposition, + filename_from_url, + get_unique_filename, + ContentRangeError, + Downloader, + PARTIAL_CONTENT, + DECODED_FROM_SUFFIX, ) -from .utils import http, MockEnvironment +from niquests.structures import CaseInsensitiveDict +from .utils import http, MockEnvironment, cd_clean_tmp_dir, DUMMY_URL -class Response(requests.Response): +class Response(niquests.Response): # 
noinspection PyDefaultArgument def __init__(self, url, headers={}, status_code=200): self.url = url @@ -102,7 +109,6 @@ def test_filename_from_url(self): def test_unique_filename(self, get_filename_max_length, orig_name, unique_on_attempt, expected): - def attempts(unique_on_attempt=0): # noinspection PyUnresolvedReferences,PyUnusedLocal def exists(filename): @@ -120,7 +126,7 @@ def exists(filename): assert expected == actual -class TestDownloads: +class TestDownloader: def test_actual_download(self, httpbin_both, httpbin): robots_txt = '/robots.txt' @@ -145,7 +151,7 @@ def test_download_with_Content_Length(self, mock_env, httpbin_both): time.sleep(1.1) downloader.chunk_downloaded(b'12345') downloader.finish() - assert not downloader.interrupted + assert not downloader.is_interrupted def test_download_no_Content_Length(self, mock_env, httpbin_both): with open(os.devnull, 'w') as devnull: @@ -157,37 +163,31 @@ def test_download_no_Content_Length(self, mock_env, httpbin_both): time.sleep(1.1) downloader.chunk_downloaded(b'12345') downloader.finish() - assert not downloader.interrupted + assert not downloader.is_interrupted def test_download_output_from_content_disposition(self, mock_env, httpbin_both): - with tempfile.TemporaryDirectory() as tmp_dirname: - orig_cwd = os.getcwd() - os.chdir(tmp_dirname) - try: - assert not os.path.isfile('filename.bin') - downloader = Downloader(mock_env) - downloader.start( - final_response=Response( - url=httpbin_both.url + '/', - headers={ - 'Content-Length': 5, - 'Content-Disposition': 'attachment; filename="filename.bin"', - } - ), - initial_url='/' - ) - downloader.chunk_downloaded(b'12345') - downloader.finish() - downloader.failed() # Stop the reporter - assert not downloader.interrupted + output_file_name = 'filename.bin' + with cd_clean_tmp_dir(assert_filenames_after=[output_file_name]): + downloader = Downloader(mock_env) + downloader.start( + final_response=Response( + url=httpbin_both.url + '/', + headers={ + 
'Content-Length': 5, + 'Content-Disposition': f'attachment; filename="{output_file_name}"', + } + ), + initial_url='/' + ) + downloader.chunk_downloaded(b'12345') + downloader.finish() + downloader.failed() # Stop the reporter + assert not downloader.is_interrupted - # TODO: Auto-close the file in that case? - downloader._output_file.close() - assert os.path.isfile('filename.bin') - finally: - os.chdir(orig_cwd) + # TODO: Auto-close the file in that case? + downloader._output_file.close() - def test_download_interrupted(self, mock_env, httpbin_both): + def test_downloader_is_interrupted(self, mock_env, httpbin_both): with open(os.devnull, 'w') as devnull: downloader = Downloader(mock_env, output_file=devnull) downloader.start( @@ -199,7 +199,7 @@ def test_download_interrupted(self, mock_env, httpbin_both): ) downloader.chunk_downloaded(b'1234') downloader.finish() - assert downloader.interrupted + assert downloader.is_interrupted def test_download_resumed(self, mock_env, httpbin_both): with tempfile.TemporaryDirectory() as tmp_dirname: @@ -220,7 +220,7 @@ def test_download_resumed(self, mock_env, httpbin_both): downloader.chunk_downloaded(b'123') downloader.finish() downloader.failed() - assert downloader.interrupted + assert downloader.is_interrupted # Write bytes with open(file, 'wb') as fh: @@ -233,13 +233,15 @@ def test_download_resumed(self, mock_env, httpbin_both): # Ensure `pre_request()` is working as expected too headers = {} downloader.pre_request(headers) - assert headers['Accept-Encoding'] == 'identity' assert headers['Range'] == 'bytes=3-' downloader.start( final_response=Response( url=httpbin_both.url + '/', - headers={'Content-Length': 5, 'Content-Range': 'bytes 3-4/5'}, + headers={ + 'Content-Length': 5, + 'Content-Range': 'bytes 3-4/5', + }, status_code=PARTIAL_CONTENT ), initial_url='/' @@ -250,12 +252,106 @@ def test_download_resumed(self, mock_env, httpbin_both): def test_download_with_redirect_original_url_used_for_filename(self, httpbin): # 
Redirect from `/redirect/1` to `/get`. expected_filename = '1.json' - orig_cwd = os.getcwd() - with tempfile.TemporaryDirectory() as tmp_dirname: - os.chdir(tmp_dirname) - try: - assert os.listdir('.') == [] - http('--download', httpbin + '/redirect/1') - assert os.listdir('.') == [expected_filename] - finally: - os.chdir(orig_cwd) + with cd_clean_tmp_dir(assert_filenames_after=[expected_filename]): + http('--download', httpbin + '/redirect/1') + + def test_download_gzip_content_encoding(self, httpbin): + expected_filename = 'gzip.json' + with cd_clean_tmp_dir(assert_filenames_after=[expected_filename]): + r = http('--download', httpbin + '/gzip') + assert r.exit_status == 0 + + @responses.activate + def test_incomplete_response(self): + # We have incompleteness checks in the downloader, but it might not be needed as it’s built into (ni|req)uests. + error_msg = 'IncompleteRead(2 bytes read, 1 more expected)' + responses.add( + method=responses.GET, + url=DUMMY_URL, + headers={ + 'Content-Length': '3', + }, + body='12', + ) + with cd_clean_tmp_dir(), pytest.raises(Exception) as exc_info: + http('--download', DUMMY_URL) + assert error_msg in str(exc_info.value) + + +class TestDecodedDownloads: + """Test downloading responses with `Content-Encoding`""" + + @responses.activate + def test_decoded_response_no_content_length(self): + responses.add( + method=responses.GET, + url=DUMMY_URL, + headers={ + 'Content-Encoding': 'deflate', + }, + body=zlib.compress(b"foobar"), + ) + with cd_clean_tmp_dir(): + r = http('--download', '--headers', DUMMY_URL) + print(r.stderr) + assert DECODED_FROM_SUFFIX.format(encodings='`deflate`') in r.stderr + + @responses.activate + def test_decoded_response_with_content_length(self): + payload = zlib.compress(b"foobar") + + responses.add( + method=responses.GET, + url=DUMMY_URL, + headers={ + 'Content-Encoding': 'deflate', + 'Content-Length': str(len(payload)), + }, + body=payload, + ) + with cd_clean_tmp_dir(): + r = http('--download', 
DUMMY_URL) + print(r.stderr) + assert DECODED_FROM_SUFFIX.format(encodings='`deflate`') in r.stderr + + @responses.activate + def test_decoded_response_without_content_length(self): + responses.add( + method=responses.GET, + url=DUMMY_URL, + headers={ + 'Content-Encoding': 'deflate', + }, + body=zlib.compress(b'foobar'), + ) + with cd_clean_tmp_dir(): + r = http('--download', DUMMY_URL) + print(r.stderr) + assert DECODED_FROM_SUFFIX.format(encodings='`deflate`') in r.stderr + + @responses.activate + def test_non_decoded_response_without_content_length(self): + responses.add( + method=responses.GET, + url=DUMMY_URL, + headers={ + 'Content-Length': '3', + }, + body='123', + ) + with cd_clean_tmp_dir(): + r = http('--download', DUMMY_URL) + print(r.stderr) + + @responses.activate + def test_non_decoded_response_with_content_length(self): + responses.add( + method=responses.GET, + url=DUMMY_URL, + headers={ + }, + body='123', + ) + with cd_clean_tmp_dir(): + r = http('--download', DUMMY_URL) + print(r.stderr) diff --git a/tests/test_early_response.py b/tests/test_early_response.py new file mode 100644 index 0000000000..809c098bfa --- /dev/null +++ b/tests/test_early_response.py @@ -0,0 +1,11 @@ +from .utils import http + + +def test_early_response_show(remote_httpbin_secure): + r = http( + "--verify=no", + 'https://early-hints.fastlylabs.com/' + ) + + assert "103 Early Hints" in r + assert "200 OK" in r diff --git a/tests/test_encoding.py b/tests/test_encoding.py index 62814161ed..cfd2183f21 100644 --- a/tests/test_encoding.py +++ b/tests/test_encoding.py @@ -168,7 +168,6 @@ def test_terminal_output_response_content_type_charset_with_stream(charset, text method=responses.GET, url=DUMMY_URL, body=f'\n{text}'.encode(charset), - stream=True, content_type=f'text/xml; charset={charset.upper()}', ) r = http('--pretty', pretty, '--stream', DUMMY_URL) diff --git a/tests/test_errors.py b/tests/test_errors.py index fca48fff15..fb9f030dcf 100644 --- a/tests/test_errors.py +++ 
b/tests/test_errors.py @@ -3,7 +3,7 @@ from unittest import mock from pytest import raises from requests import Request -from requests.exceptions import ConnectionError +from niquests.exceptions import ConnectionError from httpie.status import ExitStatus from .utils import HTTP_OK, http diff --git a/tests/test_exit_status.py b/tests/test_exit_status.py index 02fd2f1fe5..ec8849149a 100644 --- a/tests/test_exit_status.py +++ b/tests/test_exit_status.py @@ -26,7 +26,7 @@ def test_ok_response_exits_0(httpbin): def test_error_response_exits_0_without_check_status(httpbin): r = http('GET', httpbin + '/status/500') - assert '500 INTERNAL SERVER ERROR' in r + assert '500 Internal Server Error' in r assert r.exit_status == ExitStatus.SUCCESS assert not r.stderr @@ -44,7 +44,7 @@ def test_3xx_check_status_exits_3_and_stderr_when_stdout_redirected( r = http('--check-status', '--headers', 'GET', httpbin + '/status/301', env=env, tolerate_error_exit_status=True) - assert '301 MOVED PERMANENTLY' in r + assert '301 Moved Permanently' in r assert r.exit_status == ExitStatus.ERROR_HTTP_3XX assert '301 moved permanently' in r.stderr.lower() @@ -61,7 +61,7 @@ def test_3xx_check_status_redirects_allowed_exits_0(httpbin): def test_4xx_check_status_exits_4(httpbin): r = http('--check-status', 'GET', httpbin + '/status/401', tolerate_error_exit_status=True) - assert '401 UNAUTHORIZED' in r + assert '401 Unauthorized' in r assert r.exit_status == ExitStatus.ERROR_HTTP_4XX # Also stderr should be empty since stdout isn't redirected. 
assert not r.stderr @@ -70,5 +70,5 @@ def test_4xx_check_status_exits_4(httpbin): def test_5xx_check_status_exits_5(httpbin): r = http('--check-status', 'GET', httpbin + '/status/500', tolerate_error_exit_status=True) - assert '500 INTERNAL SERVER ERROR' in r + assert '500 Internal Server Error' in r assert r.exit_status == ExitStatus.ERROR_HTTP_5XX diff --git a/tests/test_h2n3.py b/tests/test_h2n3.py new file mode 100644 index 0000000000..6a680ecc70 --- /dev/null +++ b/tests/test_h2n3.py @@ -0,0 +1,164 @@ +import pytest +import json + +from httpie.ssl_ import QuicCapabilityCache + +from .utils import HTTP_OK, http, PersistentMockEnvironment + +try: + import qh3 +except ImportError: + qh3 = None + + +def test_should_not_do_http1_by_default(remote_httpbin_secure): + r = http( + "--verify=no", + remote_httpbin_secure + '/get' + ) + + assert "HTTP/1" not in r + assert HTTP_OK in r + + +def test_disable_http2n3(remote_httpbin_secure): + r = http( + # Only for DEV environment! + "--verify=no", # we have REQUESTS_CA_BUNDLE environment set, so we must disable ext verify. + '--disable-http2', + '--disable-http3', + remote_httpbin_secure + '/get' + ) + + assert "HTTP/1.1" in r + assert HTTP_OK in r + + +@pytest.mark.skipif(qh3 is None, reason="test require HTTP/3 support") +def test_force_http3(remote_httpbin_secure): + r = http( + "--verify=no", + '--http3', + remote_httpbin_secure + '/get' + ) + + assert "HTTP/3" in r + assert HTTP_OK in r + + +def test_force_multiple_error(remote_httpbin_secure): + r = http( + "--verify=no", + '--http3', + '--http2', + remote_httpbin_secure + '/get', + tolerate_error_exit_status=True, + ) + + assert 'You may only force one of --http1, --http2 or --http3.' 
in r.stderr + + +def test_disable_all_error_https(remote_httpbin_secure): + r = http( + "--verify=no", + '--disable-http1', + '--disable-http2', + '--disable-http3', + remote_httpbin_secure + '/get', + tolerate_error_exit_status=True, + ) + + assert 'At least one HTTP protocol version must be enabled.' in r.stderr + + +def test_disable_all_error_http(remote_httpbin): + r = http( + "--verify=no", + '--disable-http1', + '--disable-http2', + remote_httpbin + '/get', + tolerate_error_exit_status=True, + ) + + try: + import qh3 # noqa: F401 + except ImportError: + # that branch means that the user does not have HTTP/3 + # so, the message says that he disabled everything. + assert 'You disabled every supported protocols.' in r.stderr + else: + assert 'No compatible protocol are enabled to emit request. You currently are connected using TCP Unencrypted and must have HTTP/1.1 or/and HTTP/2 enabled to pursue.' in r.stderr + + +@pytest.fixture +def with_quic_cache_persistent(tmp_path): + env = PersistentMockEnvironment() + env.config['quic_file'] = tmp_path / 'quic.json' + try: + yield env + finally: + env.cleanup(force=True) + + +@pytest.mark.skipif(qh3 is None, reason="test require HTTP/3 support") +def test_ensure_quic_cache(remote_httpbin_secure, with_quic_cache_persistent): + """ + This test aim to verify that the QuicCapabilityCache work as intended. 
+ """ + r = http( + "--verify=no", + remote_httpbin_secure + '/get', + env=with_quic_cache_persistent + ) + + assert "HTTP/2" in r + assert HTTP_OK in r + + r = http( + "--verify=no", + remote_httpbin_secure + '/get', + env=with_quic_cache_persistent + ) + + assert "HTTP/3" in r + assert HTTP_OK in r + + tmp_path = with_quic_cache_persistent.config['quic_file'] + + with open(tmp_path, "r") as fp: + cache = json.load(fp) + + assert len(cache) == 1 + assert "pie.dev" in list(cache.keys())[0] + + +@pytest.mark.skipif(qh3 is None, reason="test require HTTP/3 support") +def test_h3_not_compatible_anymore(remote_httpbin_secure, with_quic_cache_persistent): + """verify that we can handle failures and fallback appropriately.""" + tmp_path = with_quic_cache_persistent.config['quic_file'] + + cache = QuicCapabilityCache(tmp_path) + + # doing a __setitem__ should trigger save automatically! + cache[("pie.dev", 443)] = ("pie.dev", 61443) # nothing listen on 61443! + + # without timeout + r = http( + "--verify=no", + remote_httpbin_secure + '/get', + env=with_quic_cache_persistent, + tolerate_error_exit_status=True + ) + + assert "HTTP/2 200 OK" in r + + # with timeout + r = http( + "--verify=no", + "--timeout=1", + remote_httpbin_secure + '/get', + env=with_quic_cache_persistent, + tolerate_error_exit_status=True + ) + + assert "HTTP/2 200 OK" in r diff --git a/tests/test_httpie.py b/tests/test_httpie.py index 5824340cda..1f0806c721 100644 --- a/tests/test_httpie.py +++ b/tests/test_httpie.py @@ -116,6 +116,15 @@ def test_POST_stdin(httpbin_both): assert FILE_CONTENT in r +def test_empty_stdin(httpbin_both): + env = MockEnvironment( + stdin=io.TextIOWrapper(StdinBytesIO(b"")), + stdin_isatty=False, + ) + r = http(httpbin_both + '/get', env=env) + assert HTTP_OK in r + + def test_POST_file(httpbin_both): r = http('--form', 'POST', httpbin_both + '/post', f'file@{FILE_PATH}') assert HTTP_OK in r diff --git a/tests/test_json.py b/tests/test_json.py index e758ebe7f4..bf1b3857e9 
100644 --- a/tests/test_json.py +++ b/tests/test_json.py @@ -338,13 +338,14 @@ def test_complex_json_arguments_with_non_json(httpbin, request_type, value): [ r'foo\[key\]:=1', r'bar\[1\]:=2', - r'baz\[\]:3', + r'baz\[\]:=3', r'quux[key\[escape\]]:=4', r'quux[key 2][\\][\\\\][\\\[\]\\\]\\\[\n\\]:=5', ], { 'foo[key]': 1, 'bar[1]': 2, + 'baz[]': 3, 'quux': { 'key[escape]': 4, 'key 2': {'\\': {'\\\\': {'\\[]\\]\\[\\n\\': 5}}}, diff --git a/tests/test_meta.py b/tests/test_meta.py index a57b510f0d..1e47834e2c 100644 --- a/tests/test_meta.py +++ b/tests/test_meta.py @@ -4,12 +4,38 @@ from httpie.output.formatters.colors import PIE_STYLE_NAMES from .utils import http, MockEnvironment, COLOR +try: + import qh3 +except ImportError: + qh3 = None + def test_meta_elapsed_time(httpbin): r = http('--meta', httpbin + '/delay/1') assert f'{ELAPSED_TIME_LABEL}: 1.' in r +@pytest.mark.skipif(qh3 is None, reason="test require HTTP/3 support") +def test_meta_extended_tls(remote_httpbin_secure): + # using --verify=no may cause the certificate information not to display with Python < 3.10 + # it is guaranteed to be there when using HTTP/3 over QUIC. That's why we set the '--http3' flag. + # it's a known CPython limitation with getpeercert(binary_form=False). + r = http('--verify=no', '--http3', '--meta', remote_httpbin_secure + '/get') + + assert 'Connected to' in r + assert 'Connection secured using' in r + assert 'Server certificate' in r + assert 'Certificate validity' in r + assert 'Issuer' in r + assert 'Revocation status' in r + + # If this fail, you missed two extraneous RC after the metadata render. + # see output/streams.py L89 + # why do we need two? short story, in case of redirect, expect metadata to appear multiple times, + # and we don't want them glued to the request line for example. 
+ assert str(r).endswith("\n\n") + + @pytest.mark.parametrize('style', ['auto', 'fruity', *PIE_STYLE_NAMES]) def test_meta_elapsed_time_colors(httpbin, style): r = http('--style', style, '--meta', httpbin + '/get', env=MockEnvironment(colors=256)) diff --git a/tests/test_network.py b/tests/test_network.py new file mode 100644 index 0000000000..f92a84c3b0 --- /dev/null +++ b/tests/test_network.py @@ -0,0 +1,134 @@ +import argparse + +import pytest + +from httpie.cli.ports import ( + MAX_PORT, + MIN_PORT, + OUTSIDE_VALID_PORT_RANGE_ERROR, + local_port_arg_type, + parse_local_port_arg, +) +from httpie.compat import has_ipv6_support +from .utils import HTTP_OK, http + + +def test_non_existent_interface_arg(httpbin): + """We ensure that HTTPie properly wire interface by passing an interface that does not exist. thus, we expect an error.""" + r = http( + '--interface=1.1.1.1', + httpbin + '/get', + tolerate_error_exit_status=True + ) + assert r.exit_status != 0 + assert ( + 'assign requested address' in r.stderr + or 'The requested address is not valid in its context' in r.stderr + ) + + +@pytest.mark.parametrize(['local_port_arg', 'expected_output'], [ + # Single ports — valid + ('0', 0), + ('-0', 0), + (str(MAX_PORT), MAX_PORT), + ('8000', 8000), + # Single ports — invalid + (f'{MIN_PORT - 1}', OUTSIDE_VALID_PORT_RANGE_ERROR), + (f'{MAX_PORT + 1}', OUTSIDE_VALID_PORT_RANGE_ERROR), + ('-', 'not a number'), + ('AAA', 'not a number'), + (' ', 'not a number'), + # Port ranges — valid + (f'{MIN_PORT}-{MAX_PORT}', (MIN_PORT, MAX_PORT)), + ('3000-8000', (3000, 8000)), + ('-0-8000', (0, 8000)), + ('0-0', (0, 0)), + # Port ranges — invalid + ('2-1', 'not a valid port range'), + ('2-', 'not a number'), + ('2-A', 'not a number'), + ('A-A', 'not a number'), + ('A-2', 'not a number'), + ('-10-1', OUTSIDE_VALID_PORT_RANGE_ERROR), + ('1--1', OUTSIDE_VALID_PORT_RANGE_ERROR), + ('-10--1', OUTSIDE_VALID_PORT_RANGE_ERROR), + (f'1-{MAX_PORT + 1}', OUTSIDE_VALID_PORT_RANGE_ERROR), +]) +def 
test_parse_local_port_arg(local_port_arg, expected_output): + expected_error = expected_output if isinstance(expected_output, str) else None + if not expected_error: + assert parse_local_port_arg(local_port_arg) == expected_output + else: + with pytest.raises(argparse.ArgumentTypeError, match=expected_error): + parse_local_port_arg(local_port_arg) + + +def test_local_port_arg_type(): + assert local_port_arg_type('1') == 1 + assert local_port_arg_type('1-1') == 1 + assert local_port_arg_type('1-3') in {1, 2, 3} + + +def test_invoke_with_out_of_range_local_port_arg(httpbin): + # An addition to the unittest tests + r = http( + '--local-port=70000', + httpbin + '/get', + tolerate_error_exit_status=True + ) + assert r.exit_status != 0 + assert OUTSIDE_VALID_PORT_RANGE_ERROR in r.stderr + + +@pytest.mark.parametrize('interface_arg', [ + '', + '-', + '10.25.a.u', + 'abc', + 'localhost', +]) +def test_invalid_interface_arg(httpbin, interface_arg): + r = http( + '--interface', + interface_arg, + httpbin + '/get', + tolerate_error_exit_status=True, + ) + assert f"'{interface_arg}' does not appear to be an IPv4 or IPv6" in r.stderr + + +def test_force_ipv6_on_unsupported_system(remote_httpbin): + orig = has_ipv6_support() + has_ipv6_support(False) + try: + r = http( + '-6', + remote_httpbin + '/get', + tolerate_error_exit_status=True, + ) + finally: + has_ipv6_support(orig) + assert 'Unable to force IPv6 because your system lack IPv6 support.' in r.stderr + + +def test_force_both_ipv6_and_ipv4(remote_httpbin): + r = http( + '-6', # force IPv6 + '-4', # force IPv4 + remote_httpbin + '/get', + tolerate_error_exit_status=True, + ) + + assert 'Unable to force both IPv4 and IPv6, omit the flags to allow both.' 
in r.stderr + + +def test_happy_eyeballs(remote_httpbin_secure): + r = http( + '--heb', # this will automatically and concurrently try IPv6 and IPv4 endpoints + '--verify=no', + remote_httpbin_secure + '/get', + ) + + assert r.exit_status == 0 + assert HTTP_OK in r diff --git a/tests/test_output.py b/tests/test_output.py index 2242177dbc..998de117c9 100644 --- a/tests/test_output.py +++ b/tests/test_output.py @@ -1,17 +1,14 @@ import argparse -from pathlib import Path -from unittest import mock - -import json -import os import io +import json import warnings +from pathlib import Path +from unittest import mock from urllib.request import urlopen +import niquests import pytest -import requests import responses - from httpie.cli.argtypes import ( PARSED_DEFAULT_FORMAT_OPTIONS, parse_format_options, @@ -21,7 +18,7 @@ from httpie.output.formatters.colors import get_lexer, PIE_STYLE_NAMES, BUNDLED_STYLES from httpie.status import ExitStatus from .fixtures import XML_DATA_RAW, XML_DATA_FORMATTED -from .utils import COLOR, CRLF, HTTP_OK, MockEnvironment, http, DUMMY_URL, strip_colors +from .utils import COLOR, CRLF, HTTP_OK, MockEnvironment, http, DUMMY_URL, strip_colors, cd_clean_tmp_dir # For ensuring test reproducibility, avoid using the unsorted @@ -97,18 +94,22 @@ def test_quiet_quiet_with_check_status_non_zero_pipe(self, httpbin): (['-q'], 1), (['-qq'], 0), ]) - # Might fail on Windows due to interference from other warnings. 
- @pytest.mark.xfail def test_quiet_on_python_warnings(self, test_patch, httpbin, flags, expected_warnings): def warn_and_run(*args, **kwargs): warnings.warn('warning!!') return ExitStatus.SUCCESS test_patch.side_effect = warn_and_run - with pytest.warns(None) as record: - http(*flags, httpbin + '/get') - assert len(record) == expected_warnings + if expected_warnings == 0: + with warnings.catch_warnings(): + warnings.simplefilter("error") + http(*flags, httpbin + '/get') + else: + with pytest.warns(Warning) as record: + http(*flags, httpbin + '/get') + + assert len(record) >= expected_warnings def test_double_quiet_on_error(self, httpbin): r = http( @@ -116,7 +117,7 @@ def test_double_quiet_on_error(self, httpbin): tolerate_error_exit_status=True, ) assert not r - assert 'Couldn’t resolve the given hostname' in r.stderr + assert 'Couldn’t resolve the given hostname' in r.stderr or 'Name or service not known' in r.stderr @pytest.mark.parametrize('quiet_flags', QUIET_SCENARIOS) @mock.patch('httpie.cli.argtypes.AuthCredentials._getpass', @@ -155,16 +156,13 @@ def test_quiet_with_explicit_output_options(self, httpbin, quiet_flags, output_o @pytest.mark.parametrize('quiet_flags', QUIET_SCENARIOS) @pytest.mark.parametrize('with_download', [True, False]) - def test_quiet_with_output_redirection(self, tmp_path, httpbin, quiet_flags, with_download): + def test_quiet_with_output_redirection(self, httpbin, quiet_flags, with_download): url = httpbin + '/robots.txt' output_path = Path('output.txt') env = MockEnvironment() - orig_cwd = os.getcwd() - output = requests.get(url).text + output = niquests.get(url).text extra_args = ['--download'] if with_download else [] - os.chdir(tmp_path) - try: - assert os.listdir('.') == [] + with cd_clean_tmp_dir(assert_filenames_after=[str(output_path)]): r = http( *quiet_flags, '--output', str(output_path), @@ -172,7 +170,6 @@ def test_quiet_with_output_redirection(self, tmp_path, httpbin, quiet_flags, wit url, env=env ) - assert 
os.listdir('.') == [str(output_path)] assert r == '' assert r.stderr == '' assert env.stderr is env.devnull @@ -181,8 +178,6 @@ def test_quiet_with_output_redirection(self, tmp_path, httpbin, quiet_flags, wit else: assert env.stdout is not env.devnull # --output swaps stdout. assert output_path.read_text(encoding=UTF8) == output - finally: - os.chdir(orig_cwd) class TestVerboseFlag: @@ -214,7 +209,7 @@ def test_verbose_json(self, httpbin): def test_verbose_implies_all(self, httpbin): r = http('--verbose', '--follow', httpbin + '/redirect/1') assert 'GET /redirect/1 HTTP/1.1' in r - assert 'HTTP/1.1 302 FOUND' in r + assert 'HTTP/1.1 302 Found' in r assert 'GET /get HTTP/1.1' in r assert HTTP_OK in r @@ -281,8 +276,14 @@ def test_ensure_status_code_is_shown_on_all_themes(http_server, style, msg): http_server + '/status/msg', '--raw', msg, env=env) + # Custom reason phrase are most likely to disappear, + # due to HTTP/2+ protocols. urllib3.future replace them anyway in HTTP/1.1 + # for uniformity across protocols. + if 'CUSTOM' in msg: + msg = ' OK' + # Trailing space is stripped away. 
- assert 'HTTP/1.0 200' + msg.rstrip() in strip_colors(r) + assert 'HTTP/1.1 200' + msg.rstrip() in strip_colors(r) class TestPrettyOptions: diff --git a/tests/test_redirects.py b/tests/test_redirects.py index f993f31171..44baf04cf2 100644 --- a/tests/test_redirects.py +++ b/tests/test_redirects.py @@ -15,7 +15,7 @@ def test_follow_all_redirects_shown(httpbin): r = http('--follow', '--all', httpbin + '/redirect/2') assert r.count('HTTP/1.1') == 3 - assert r.count('HTTP/1.1 302 FOUND', 2) + assert r.count('HTTP/1.1 302 Found', 2) assert HTTP_OK in r diff --git a/tests/test_regressions.py b/tests/test_regressions.py index 622d03d7ce..44c675c777 100644 --- a/tests/test_regressions.py +++ b/tests/test_regressions.py @@ -30,7 +30,6 @@ def test_output_devnull(httpbin): def test_verbose_redirected_stdout_separator(httpbin): """ - """ r = http( @@ -47,3 +46,13 @@ def test_verbose_redirected_stdout_separator(httpbin): Expect.RESPONSE_HEADERS, Expect.BODY, ]) + + +def test_every_localhost_resolve(httpbin): + """ + https://github.com/httpie/cli/issues/1458 + + """ + new_target = str(httpbin).replace('127.0.0.1', 'example.localhost') + assert 'example.localhost' in new_target + http(str(httpbin).replace('127.0.0.1', 'example.localhost') + '/get') diff --git a/tests/test_resolver.py b/tests/test_resolver.py new file mode 100644 index 0000000000..f3a6bc6e05 --- /dev/null +++ b/tests/test_resolver.py @@ -0,0 +1,68 @@ +import pytest + +from .utils import http + +try: + import qh3 +except ImportError: + qh3 = None + + +@pytest.mark.skipif(qh3 is None, reason="test require HTTP/3 support") +def test_ensure_resolver_used(remote_httpbin_secure): + """This test ensure we're using specified resolver to get into pie.dev. 
+ Using a custom resolver with Niquests enable direct HTTP/3 negotiation and pie.dev + (DNS) is handled by Cloudflare (NS) services.""" + r = http( + "--verify=no", + "--resolver=doh+cloudflare://", + remote_httpbin_secure + "/get" + ) + + assert "HTTP/3" in r + + +def test_ensure_override_resolver_used(remote_httpbin): + """Just an additional check to ensure we are wired properly to Niquests resolver parameter.""" + r = http( + "--resolver=pie.dev:240.0.0.0", # override DNS response to TARPIT net addr. + "--disable-http3", + remote_httpbin + "/get", + tolerate_error_exit_status=True + ) + + assert "Request timed out" in r.stderr or "A socket operation was attempted to an unreachable network" in r.stderr + + +def test_invalid_override_resolver(): + r = http( + "--resolver=pie.dev:abc", # we do this nonsense on purpose + "pie.dev/get", + tolerate_error_exit_status=True + ) + + assert "'abc' does not appear to be an IPv4 or IPv6 address" in r.stderr + + r = http( + "--resolver=abc", # we do this nonsense on purpose + "pie.dev/get", + tolerate_error_exit_status=True + ) + + assert "The manual resolver for a specific host requires to be formatted like" in r.stderr + + r = http( + "--resolver=pie.dev:127.0.0", # we do this nonsense on purpose + "pie.dev/get", + tolerate_error_exit_status=True + ) + + assert "'127.0.0' does not appear to be an IPv4 or IPv6 address" in r.stderr + + r = http( + "--resolver=doz://example.com", # we do this nonsense on purpose + "pie.dev/get", + tolerate_error_exit_status=True + ) + + assert "'doz' is not a valid ProtocolResolver" in r.stderr diff --git a/tests/test_sessions.py b/tests/test_sessions.py index aa5243487d..97d7744102 100644 --- a/tests/test_sessions.py +++ b/tests/test_sessions.py @@ -1,15 +1,13 @@ import json -import os import shutil +from base64 import b64encode from contextlib import contextmanager from datetime import datetime -from unittest import mock from pathlib import Path from typing import Iterator +from unittest 
import mock import pytest - -from .fixtures import FILE_PATH_ARG, UNICODE from httpie.context import Environment from httpie.encoding import UTF8 from httpie.plugins import AuthPlugin @@ -17,9 +15,9 @@ from httpie.plugins.registry import plugin_manager from httpie.sessions import Session from httpie.utils import get_expired_cookies +from .fixtures import FILE_PATH_ARG, UNICODE from .test_auth_plugins import basic_auth -from .utils import DUMMY_HOST, HTTP_OK, MockEnvironment, http, mk_config_dir -from base64 import b64encode +from .utils import DUMMY_HOST, HTTP_OK, MockEnvironment, http, mk_config_dir, cd_clean_tmp_dir class SessionTestBase: @@ -253,13 +251,9 @@ def test_session_default_header_value_overwritten(self, httpbin): def test_download_in_session(self, tmp_path, httpbin): # https://github.com/httpie/cli/issues/412 self.start_session(httpbin) - cwd = os.getcwd() - os.chdir(tmp_path) - try: + with cd_clean_tmp_dir(assert_filenames_after=['get.json']): http('--session=test', '--download', httpbin + '/get', env=self.env()) - finally: - os.chdir(cwd) @pytest.mark.parametrize( 'auth_require_param, auth_parse_param', @@ -822,19 +816,27 @@ def test_session_multiple_headers_with_same_name(basic_session, httpbin): [ ( 'localhost_http_server', - {'secure_cookie': 'foo', 'insecure_cookie': 'bar'} + {'insecure_cookie': 'bar'} ), ( 'remote_httpbin', {'insecure_cookie': 'bar'} + ), + ( + 'httpbin_secure_untrusted', + {'secure_cookie': 'foo', 'insecure_cookie': 'bar'} ) ] ) def test_secure_cookies_on_localhost(mock_env, tmp_path, server, expected_cookies, request): server = request.getfixturevalue(server) session_path = tmp_path / 'session.json' + server = str(server).replace('127.0.0.1', 'localhost') + additional_args = ['--verify=no'] if "https" in server else [] + http( '--session', str(session_path), + *additional_args, server + '/cookies/set', 'secure_cookie==foo', 'insecure_cookie==bar' @@ -847,6 +849,8 @@ def test_secure_cookies_on_localhost(mock_env, tmp_path, 
server, expected_cookie r = http( '--session', str(session_path), + *additional_args, server + '/cookies' ) + assert r.json == {'cookies': expected_cookies} diff --git a/tests/test_ssl.py b/tests/test_ssl.py index 6fb983785a..6a6ba5c86f 100644 --- a/tests/test_ssl.py +++ b/tests/test_ssl.py @@ -2,7 +2,7 @@ import pytest import pytest_httpbin.certs -import requests.exceptions +import niquests.exceptions import urllib3 from unittest import mock @@ -10,23 +10,11 @@ from httpie.ssl_ import AVAILABLE_SSL_VERSION_ARG_MAPPING, DEFAULT_SSL_CIPHERS_STRING from httpie.status import ExitStatus -from .utils import HTTP_OK, TESTS_ROOT, IS_PYOPENSSL, http +from .utils import HTTP_OK, TESTS_ROOT, http - -try: - # Handle OpenSSL errors, if installed. - # See - # noinspection PyUnresolvedReferences - import OpenSSL.SSL - ssl_errors = ( - requests.exceptions.SSLError, - OpenSSL.SSL.Error, - ValueError, # TODO: Remove with OSS-65 - ) -except ImportError: - ssl_errors = ( - requests.exceptions.SSLError, - ) +ssl_errors = ( + niquests.exceptions.SSLError, +) CERTS_ROOT = TESTS_ROOT / 'client_certs' CLIENT_CERT = str(CERTS_ROOT / 'client.crt') @@ -59,10 +47,7 @@ def test_ssl_version(httpbin_secure, ssl_version): ) assert HTTP_OK in r except ssl_errors as e: - if ssl_version == 'ssl3': - # pytest-httpbin doesn't support ssl3 - pass - elif e.__context__ is not None: # Check if root cause was an unsupported TLS version + if e.__context__ is not None: # Check if root cause was an unsupported TLS version root = e.__context__ while root.__context__ is not None: root = root.__context__ @@ -151,7 +136,6 @@ def test_ciphers(httpbin_secure): assert HTTP_OK in r -@pytest.mark.skipif(IS_PYOPENSSL, reason='pyOpenSSL uses a different message format.') def test_ciphers_none_can_be_selected(httpbin_secure): r = http( httpbin_secure.url + '/get', @@ -168,15 +152,6 @@ def test_ciphers_none_can_be_selected(httpbin_secure): assert 'cipher' in r.stderr -def test_pyopenssl_presence(): - if not IS_PYOPENSSL: 
- assert not urllib3.util.ssl_.IS_PYOPENSSL - assert not urllib3.util.IS_PYOPENSSL - else: - assert urllib3.util.ssl_.IS_PYOPENSSL - assert urllib3.util.IS_PYOPENSSL - - @mock.patch('httpie.cli.argtypes.SSLCredentials._prompt_password', new=lambda self, prompt: PWD_CLIENT_PASS) def test_password_protected_cert_prompt(httpbin_secure): diff --git a/tests/test_stream.py b/tests/test_stream.py index b0b9b8bde8..9dd335f1f5 100644 --- a/tests/test_stream.py +++ b/tests/test_stream.py @@ -72,7 +72,7 @@ def test_pretty_options_with_and_without_stream_with_converter(pretty, stream): body = b'\x00{"foo":42,\n"bar":"baz"}' responses.add(responses.GET, DUMMY_URL, body=body, - stream=True, content_type='json/bytes') + content_type='json/bytes') args = ['--pretty=' + pretty, 'GET', DUMMY_URL] if stream: diff --git a/tests/test_tokens.py b/tests/test_tokens.py index 655445ce49..7001510074 100644 --- a/tests/test_tokens.py +++ b/tests/test_tokens.py @@ -92,10 +92,10 @@ def test_redirected_headers_multipart_no_separator(): def test_verbose_chunked(httpbin_with_chunked_support): - r = http('--verbose', '--chunked', httpbin_with_chunked_support + '/post', 'hello=world') + r = http('-vv', '--chunked', httpbin_with_chunked_support + '/post', 'hello=world') assert HTTP_OK in r assert 'Transfer-Encoding: chunked' in r - assert_output_matches(r, ExpectSequence.TERMINAL_EXCHANGE) + assert_output_matches(r, ExpectSequence.TERMINAL_EXCHANGE_META) def test_request_headers_response_body(httpbin): @@ -115,4 +115,4 @@ def test_request_double_verbose(httpbin): def test_request_meta(httpbin): r = http('--meta', httpbin + '/get') - assert_output_matches(r, [Expect.RESPONSE_META]) + assert_output_matches(r, [Expect.REQUEST_META, Expect.RESPONSE_META]) diff --git a/tests/test_transport_plugin.py b/tests/test_transport_plugin.py index b71592df8d..5f04ec6203 100644 --- a/tests/test_transport_plugin.py +++ b/tests/test_transport_plugin.py @@ -1,8 +1,8 @@ from io import BytesIO -from requests.adapters 
import BaseAdapter -from requests.models import Response -from requests.utils import get_encoding_from_headers +from niquests.adapters import BaseAdapter +from niquests.models import Response +from niquests.utils import get_encoding_from_headers from httpie.plugins import TransportPlugin from httpie.plugins.registry import plugin_manager diff --git a/tests/test_update_warnings.py b/tests/test_update_warnings.py index e794162649..af528727ff 100644 --- a/tests/test_update_warnings.py +++ b/tests/test_update_warnings.py @@ -197,7 +197,8 @@ def with_warnings(tmp_path): env = PersistentMockEnvironment() env.config['version_info_file'] = tmp_path / 'version.json' env.config['disable_update_warnings'] = False - return env + yield env + env.cleanup(force=True) @pytest.fixture @@ -205,7 +206,8 @@ def without_warnings(tmp_path): env = PersistentMockEnvironment() env.config['version_info_file'] = tmp_path / 'version.json' env.config['disable_update_warnings'] = True - return env + yield env + env.cleanup(force=True) @pytest.fixture @@ -216,7 +218,7 @@ def fetch_update_mock(mocker): @pytest.fixture def static_fetch_data(mocker): - mock_get = mocker.patch('requests.get') + mock_get = mocker.patch('niquests.get') mock_get.return_value.status_code = 200 mock_get.return_value.json.return_value = { BUILD_CHANNEL: HIGHEST_VERSION, diff --git a/tests/test_uploads.py b/tests/test_uploads.py index e6bb80ac70..0ac7365e69 100644 --- a/tests/test_uploads.py +++ b/tests/test_uploads.py @@ -1,9 +1,11 @@ import os import json +import platform import sys import subprocess import time import contextlib + import httpie.__main__ as main import pytest @@ -16,7 +18,7 @@ MockEnvironment, StdinBytesIO, http, HTTP_OK, ) -from .fixtures import FILE_PATH_ARG, FILE_PATH, FILE_CONTENT +from .fixtures import FILE_PATH_ARG, FILE_PATH, FILE_CONTENT, UTF8_IN_NAME_FILE_PATH MAX_RESPONSE_WAIT_TIME = 5 @@ -143,7 +145,14 @@ def test_reading_from_stdin(httpbin, wait): @pytest.mark.requires_external_processes 
@pytest.mark.skipif(is_windows, reason="Windows doesn't support select() calls into files") +@pytest.mark.xfail( + platform.system() == "Darwin" and os.environ.get("CI") is not None, + reason="GitHub CI and MacOS raises random failures", + strict=False, +) def test_stdin_read_warning(httpbin): + """This test is flaky. Expect random failure in the CI under MacOS. + It's mainly due to the poor VM performance.""" with stdin_processes(httpbin) as (process_1, process_2): # Wait before sending any data time.sleep(1) @@ -254,6 +263,18 @@ def test_multipart(self, httpbin): assert FORM_CONTENT_TYPE not in r assert 'multipart/form-data' in r + def test_multipart_with_rfc2231(self, httpbin): + """Non ascii filename should be encoded properly, following RFC2231, even if it's said + to be half obsolete. HTTP headers don't support officially UTF-8! In 2024...""" + r = http( + '--verbose', + '--multipart', + httpbin + '/post', + f'my_file@{UTF8_IN_NAME_FILE_PATH}', + ) + assert HTTP_OK in r + assert "filename*=utf-8\'\'%E5%A4%A9%E7%8B%97.txt" in r + def test_form_multipart_custom_boundary(self, httpbin): boundary = 'HTTPIE_FTW' r = http( diff --git a/tests/utils/__init__.py b/tests/utils/__init__.py index 0a9af608a5..a1c30a4de0 100644 --- a/tests/utils/__init__.py +++ b/tests/utils/__init__.py @@ -1,17 +1,18 @@ """Utilities for HTTPie test suite.""" +import contextlib +import os import re import shlex -import os import sys import time import json import tempfile -import warnings import pytest from contextlib import suppress from io import BytesIO from pathlib import Path from typing import Any, Optional, Union, List, Iterable +from shutil import rmtree import httpie.core as core import httpie.manager.__main__ as manager @@ -31,8 +32,6 @@ HTTPBIN_WITH_CHUNKED_SUPPORT_DOMAIN = 'pie.dev' HTTPBIN_WITH_CHUNKED_SUPPORT = 'http://' + HTTPBIN_WITH_CHUNKED_SUPPORT_DOMAIN -IS_PYOPENSSL = os.getenv('HTTPIE_TEST_WITH_PYOPENSSL', '0') == '1' - TESTS_ROOT = Path(__file__).parent.parent CRLF = 
'\r\n' COLOR = '\x1b[' @@ -83,8 +82,8 @@ class Encoder: TEMPLATE = 'hash({})' - STR_PATTERN = re.compile(r'hash\((.*)\)') - BYTES_PATTERN = re.compile(rb'hash\((.*)\)') + STR_PATTERN = re.compile(r'hash\((.*?)\)') + BYTES_PATTERN = re.compile(rb'hash\((.*?)\)') def __init__(self): self.substitutions = {} @@ -125,6 +124,11 @@ class StdinBytesIO(BytesIO): """To be used for `MockEnvironment.stdin`""" len = 0 # See `prepare_request_body()` + def peek(self, size): + buf = self.read(size) + self.seek(0) + return buf + class MockEnvironment(Environment): """Environment subclass with reasonable defaults for testing.""" @@ -132,14 +136,14 @@ class MockEnvironment(Environment): stdin_isatty = True stdout_isatty = True is_windows = False - show_displays = False + show_displays = True def __init__(self, create_temp_config_dir=True, **kwargs): self._encoder = Encoder() if 'stdout' not in kwargs: kwargs['stdout'] = tempfile.NamedTemporaryFile( mode='w+t', - prefix='httpie_stderr', + prefix='httpie_stdout', newline='', encoding=UTF8, ) @@ -170,10 +174,15 @@ def cleanup(self): self.devnull.close() self.stdout.close() self.stderr.close() - warnings.resetwarnings() + if self._orig_stdout and self._orig_stdout != self.stdout: + self._orig_stdout.close() + if self._orig_stderr and self.stderr != self._orig_stderr: + self._orig_stderr.close() + self.devnull.close() + # it breaks without reasons pytest filterwarnings + # warnings.resetwarnings() if self._delete_config_dir: assert self._temp_dir in self.config_dir.parents - from shutil import rmtree rmtree(self.config_dir, ignore_errors=True) def __del__(self): @@ -185,8 +194,16 @@ def __del__(self): class PersistentMockEnvironment(MockEnvironment): - def cleanup(self): - pass + def cleanup(self, *, force: bool = False): + if force: + self.devnull.close() + self.stdout.close() + self.stderr.close() + if self._orig_stdout and self._orig_stdout != self.stdout: + self._orig_stdout.close() + if self._orig_stderr and self.stderr != 
self._orig_stderr: + self._orig_stderr.close() + self.devnull.close() class BaseCLIResponse: @@ -392,6 +409,7 @@ def http( add_to_args.append('--timeout=3') complete_args = [program_name, *add_to_args, *args] + # print(' '.join(complete_args)) def dump_stderr(): @@ -453,3 +471,22 @@ def dump_stderr(): finally: env.cleanup() + + +@contextlib.contextmanager +def cd_clean_tmp_dir(assert_filenames_after=None): + """Run commands inside a clean temporary directory, and verify created file names.""" + orig_cwd = os.getcwd() + try: + with tempfile.TemporaryDirectory() as tmp_dirname: + os.chdir(tmp_dirname) + assert os.listdir('.') == [] + try: + yield tmp_dirname + actual_filenames = os.listdir('.') + if assert_filenames_after is not None: + assert actual_filenames == assert_filenames_after, (actual_filenames, assert_filenames_after) + finally: + os.chdir(orig_cwd) + except (PermissionError, NotADirectoryError): + pass diff --git a/tests/utils/http_server.py b/tests/utils/http_server.py index 86cc069c57..728946f555 100644 --- a/tests/utils/http_server.py +++ b/tests/utils/http_server.py @@ -135,7 +135,8 @@ def _http_server(): thread = threading.Thread(target=server.serve_forever) thread.start() yield server - server.shutdown() + server.socket.close() + server.shutdown() # shutdown seems only to stop the thread, not closing the socket. 
thread.join() diff --git a/tests/utils/matching/parsing.py b/tests/utils/matching/parsing.py index e502d76bc8..642d5d6b56 100644 --- a/tests/utils/matching/parsing.py +++ b/tests/utils/matching/parsing.py @@ -8,6 +8,7 @@ SEPARATOR_RE = re.compile(f'^{MESSAGE_SEPARATOR}') KEY_VALUE_RE = re.compile(r'[\n]*((.*?):(.+)[\n]?)+[\n]*') +KEY_VALUE_RE_NO_LF = re.compile(r'((.*?):(.+)(\n))+(\n)') def make_headers_re(message_type: Expect): @@ -18,7 +19,7 @@ def make_headers_re(message_type: Expect): non_crlf = rf'[^{CRLF}]' # language=RegExp - http_version = r'HTTP/\d+\.\d+' + http_version = r'HTTP/((\d+\.\d+)|\d+)' if message_type is Expect.REQUEST_HEADERS: # POST /post HTTP/1.1 start_line_re = fr'{non_crlf}*{http_version}{crlf}' @@ -42,6 +43,7 @@ def make_headers_re(message_type: Expect): CRLF, # Not really but useful for testing (just remember not to include it in a body). ] TOKEN_REGEX_MAP = { + Expect.REQUEST_META: KEY_VALUE_RE_NO_LF, Expect.REQUEST_HEADERS: make_headers_re(Expect.REQUEST_HEADERS), Expect.RESPONSE_HEADERS: make_headers_re(Expect.RESPONSE_HEADERS), Expect.RESPONSE_META: KEY_VALUE_RE, diff --git a/tests/utils/matching/tokens.py b/tests/utils/matching/tokens.py index c82dafedc2..1dfe7d0c57 100644 --- a/tests/utils/matching/tokens.py +++ b/tests/utils/matching/tokens.py @@ -6,6 +6,7 @@ class Expect(Enum): Predefined token types we can expect in the output. """ + REQUEST_META = auto() REQUEST_HEADERS = auto() RESPONSE_HEADERS = auto() RESPONSE_META = auto() @@ -47,6 +48,7 @@ class ExpectSequence: *TERMINAL_RESPONSE, ] TERMINAL_EXCHANGE_META = [ + Expect.REQUEST_META, *TERMINAL_EXCHANGE, Expect.RESPONSE_META ]